[01/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 291f0a4e9 -> 8b1eaec14


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.html
index b9b7a7e..f3af6db 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestTableDescriptorBuilder.html
@@ -68,289 +68,257 @@
 060    String cpName = "a.b.c.d";
 061    TableDescriptor htd
 062      = TableDescriptorBuilder.newBuilder(TableName.META_TABLE_NAME)
-063        .addCoprocessor(cpName)
-064        .addCoprocessor(cpName)
+063        .setCoprocessor(cpName)
+064        .setCoprocessor(cpName)
 065        .build();
 066  }
 067
 068  @Test
-069  public void testAddCoprocessorWithSpecStr() throws IOException {
-070    String cpName = "a.b.c.d";
-071    TableDescriptorBuilder builder
-072      = TableDescriptorBuilder.newBuilder(TableName.META_TABLE_NAME);
-073
-074    try {
-075      builder.addCoprocessorWithSpec(cpName);
-076      fail();
-077    } catch (IllegalArgumentException iae) {
-078      // Expected as cpName is invalid
-079    }
-080
-081    // Try minimal spec.
-082    try {
-083      builder.addCoprocessorWithSpec("file:///some/path" + "|" + cpName);
-084      fail();
-085    } catch (IllegalArgumentException iae) {
-086      // Expected to be invalid
-087    }
-088
-089    // Try more spec.
-090    String spec = "hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2";
-091    try {
-092      builder.addCoprocessorWithSpec(spec);
-093    } catch (IllegalArgumentException iae) {
-094      fail();
-095    }
-096
-097    // Try double add of same coprocessor
-098    try {
-099      builder.addCoprocessorWithSpec(spec);
-100      fail();
-101    } catch (IOException ioe) {
-102      // Expect that the coprocessor already exists
-103    }
-104  }
-105
-106  @Test
-107  public void testPb() throws DeserializationException, IOException {
-108    final int v = 123;
-109    TableDescriptor htd
-110      = TableDescriptorBuilder.newBuilder(TableName.META_TABLE_NAME)
-111      .setMaxFileSize(v)
-112      .setDurability(Durability.ASYNC_WAL)
-113      .setReadOnly(true)
-114      .setRegionReplication(2)
-115      .build();
-116
-117    byte [] bytes = TableDescriptorBuilder.toByteArray(htd);
-118    TableDescriptor deserializedHtd = TableDescriptorBuilder.parseFrom(bytes);
-119    assertEquals(htd, deserializedHtd);
-120    assertEquals(v, deserializedHtd.getMaxFileSize());
-121    assertTrue(deserializedHtd.isReadOnly());
-122    assertEquals(Durability.ASYNC_WAL, deserializedHtd.getDurability());
-123    assertEquals(2, deserializedHtd.getRegionReplication());
-124  }
-125
-126  /**
-127   * Test cps in the table description
-128   * @throws Exception
-129   */
-130  @Test
-131  public void testGetSetRemoveCP() throws Exception {
-132    // simple CP
-133    String className = "org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver";
-134    TableDescriptor desc
-135      = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()))
-136         .addCoprocessor(className) // add and check that it is present
-137        .build();
-138    assertTrue(desc.hasCoprocessor(className));
-139    desc = TableDescriptorBuilder.newBuilder(desc)
-140         .removeCoprocessor(className) // remove it and check that it is gone
-141        .build();
-142    assertFalse(desc.hasCoprocessor(className));
-143  }
-144
-145  /**
-146   * Test cps in the table description
-147   * @throws Exception
-148   */
-149  @Test
-150  public void testSetListRemoveCP() throws Exception {
-151    TableDescriptor desc
-152      = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build();
-153    // Check that any coprocessor is present.
-154    assertTrue(desc.getCoprocessors().isEmpty());
-155
-156    // simple CP
-157    String className1 = "org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver";
-158    String className2 = "org.apache.hadoop.hbase.coprocessor.SampleRegionWALObserver";
-159    desc = TableDescriptorBuilder.newBuilder(desc)
-160            .addCoprocessor(className1) // Add the 1 coprocessor and check if present.
-161            .build();
-162    assertTrue(desc.getCoprocessors().size() == 1);
-163    assertTrue(desc.getCoprocessors().contains(className1));
-164
-165    desc = TableDescriptorBuilder.newBuilder(desc)
-166            // Add the 2nd coprocessor and check if present.
-167            // remove it and check that it is gone
-168            .addCoprocessor(clas

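For orientation only (not part of the published diff): a minimal sketch of the builder-style call that the updated test exercises, where setCoprocessor(String) replaces the removed addCoprocessor(String). The table name "demo" is made up; the observer class is the test coprocessor already named in the diff.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class SetCoprocessorExample {
  public static TableDescriptor describe() throws java.io.IOException {
    // setCoprocessor(String) registers the coprocessor by class name; registering the
    // same class twice is rejected, which is what the test above checks.
    return TableDescriptorBuilder.newBuilder(TableName.valueOf("demo"))
        .setCoprocessor("org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver")
        .build();
  }
}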
[36/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index d83e763..a47005c 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -1466,12 +1466,6 @@
  
 addColumnFamily(TableName, ColumnFamilyDescriptor) - Method in class org.apache.hadoop.hbase.client.RawAsyncHBaseAdmin
  
-addColumnFamily(ColumnFamilyDescriptor) - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder
- 
-addColumnFamily(ColumnFamilyDescriptor) - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor
-
-Adds a column family.
-
 addColumnFamily(ColumnSchemaModel) - Method in class org.apache.hadoop.hbase.rest.model.TableSchemaModel
 
 Add a column family to the table descriptor
@@ -1526,18 +1520,6 @@
 
 Write the raw constraint and configuration to the descriptor.
 
-addCoprocessor(String) - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder
- 
-addCoprocessor(String, Path, int, Map) - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder
- 
-addCoprocessor(String) - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor
-
-Add a table coprocessor to this table.
-
-addCoprocessor(String, Path, int, Map) - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor
-
-Add a table coprocessor to this table.
-
 addCoprocessor(String) - Method in class org.apache.hadoop.hbase.HTableDescriptor
 
 Deprecated.
@@ -1548,16 +1530,6 @@
 Deprecated.
 Add a table coprocessor to this table.
 
-addCoprocessorToMap(String) - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor
-
-Add coprocessor to values Map
-
-addCoprocessorWithSpec(String) - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder
- 
-addCoprocessorWithSpec(String) - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor
-
-Add a table coprocessor to this table.
-
 addCoprocessorWithSpec(String) - Method in class org.apache.hadoop.hbase.HTableDescriptor
 
 Deprecated.
@@ -7016,6 +6988,8 @@
 
 build() - Method in class org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder
  
+build() - Method in class org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder
+ 
 build() - Method in class org.apache.hadoop.hbase.client.RegionInfoBuilder
  
 build() - Method in class org.apache.hadoop.hbase.client.RetryingCallerInterceptorFactory
@@ -11339,6 +11313,10 @@
 
 External class loaders cache keyed by external jar path.
 
+className - Variable in class org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder
+ 
+className - Variable in class org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder.CoprocessorDescriptorImpl
+ 
 className - Variable in class org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.TableCoprocessorAttribute
  
 CLASSNAME_KEY - Static variable in interface org.apache.hadoop.hbase.client.BufferedMutator
@@ -17486,6 +17464,20 @@
 
 Creates a JarClassLoader that loads classes from the given paths.
 
+CoprocessorDescriptor - Interface in org.apache.hadoop.hbase.client
+
+CoprocessorDescriptor contains the details about how to build a coprocessor.
+
+CoprocessorDescriptorBuilder - Class in org.apache.hadoop.hbase.client
+
+Used to build the CoprocessorDescriptor
+
+CoprocessorDescriptorBuilder(String) - Constructor for class org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder
+ 
+CoprocessorDescriptorBuilder.CoprocessorDescriptorImpl - Class in org.apache.hadoop.hbase.client
+ 
+CoprocessorDescriptorImpl(String, String, int, Map) - Constructor for class org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder.CoprocessorDescriptorImpl
+ 
 CoprocessorEnvironment - Interface in org.apache.hadoop.hbase
 
 Coprocessor environment state.
@@ -18315,19 +18307,49 @@
  
 CP_HTD_ATTR_INCLUSION_KEY - Static variable in class org.apache.hadoop.hbase.HConstants
  
+CP_HTD_ATTR_KEY_PATTERN - Static variable in class org.apache.hadoop.hbase.client.TableDescriptorBuilder
+ 
 CP_HTD_ATTR_KEY_PATTERN - Static variable in class org.apache.hadoop.hbase.HConstants
+
+Deprecated.
+It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
+CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN - Static variable in class org.apache.hadoop.hbase.client.TableDescriptorBuilder
  
 CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN - Static variable in class org.apache.hadoop.hbase.HConstants
+
+Deprecated.
+It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
+CP_HTD_ATTR_VALUE_PARAM_PATTERN - Static variable in class org.apache.hadoop.hbase.client.TableDescriptorBuilder
  
 CP_HTD_

[47/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html b/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
index 5196174..f29213b 100644
--- a/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":42,"i1":42,"i2":42,"i3":42,"i4":42,"i5":42,"i6":42,"i7":42,"i8":42,"i9":42,"i10":42,"i11":42,"i12":42,"i13":42,"i14":42,"i15":42,"i16":42,"i17":42,"i18":42,"i19":42,"i20":42,"i21":42,"i22":42,"i23":42,"i24":42,"i25":42,"i26":42,"i27":42,"i28":42,"i29":42,"i30":42,"i31":42,"i32":42,"i33":42,"i34":42,"i35":42,"i36":42,"i37":42,"i38":42,"i39":42,"i40":42,"i41":42,"i42":42,"i43":42,"i44":41,"i45":42,"i46":42,"i47":42,"i48":42,"i49":42,"i50":42,"i51":42,"i52":42,"i53":42,"i54":42,"i55":42,"i56":42,"i57":42,"i58":42,"i59":42,"i60":42,"i61":42,"i62":42,"i63":42,"i64":42,"i65":42,"i66":42,"i67":42,"i68":42,"i69":42,"i70":42,"i71":42,"i72":42,"i73":42};
+var methods = 
{"i0":42,"i1":42,"i2":42,"i3":42,"i4":42,"i5":42,"i6":42,"i7":42,"i8":42,"i9":42,"i10":42,"i11":42,"i12":42,"i13":42,"i14":42,"i15":42,"i16":42,"i17":42,"i18":42,"i19":42,"i20":42,"i21":42,"i22":42,"i23":42,"i24":42,"i25":42,"i26":42,"i27":42,"i28":42,"i29":42,"i30":42,"i31":42,"i32":42,"i33":42,"i34":42,"i35":42,"i36":42,"i37":42,"i38":42,"i39":42,"i40":42,"i41":42,"i42":42,"i43":42,"i44":42,"i45":41,"i46":42,"i47":42,"i48":42,"i49":42,"i50":42,"i51":42,"i52":42,"i53":42,"i54":42,"i55":42,"i56":42,"i57":42,"i58":42,"i59":42,"i60":42,"i61":42,"i62":42,"i63":42,"i64":42,"i65":42,"i66":42,"i67":42,"i68":42,"i69":42,"i70":42,"i71":42,"i72":42,"i73":42,"i74":42};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
  @InterfaceAudience.Public
-public class HTableDescriptor
+public class HTableDescriptor
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements TableDescriptor, https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable
 HTableDescriptor contains the details about an HBase table  
such as the descriptors of
@@ -461,26 +461,33 @@ implements 
+https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
+getCoprocessorDescriptors()
+Deprecated. 
+Return the list of attached co-processor represented
+
+
+
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString>
 getCoprocessors()
 Deprecated. 
 Return the list of attached co-processor represented by 
their name className
 
 
-
+
 protected 
org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor
 getDelegateeForModification()
 Deprecated. 
  
 
-
+
 Durability
 getDurability()
 Deprecated. 
 Returns the durability setting for the table.
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
 getFamilies()
 Deprecated. 
@@ -488,7 +495,7 @@ implements 
+
 https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">Set
 getFamiliesKeys()
 Deprecated. 
@@ -498,7 +505,7 @@ implements 
+
 HColumnDescriptor
 getFamily(byte[] column)
 Deprecated. 
@@ -506,7 +513,7 @@ implements 
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 getFlushPolicyClassName()
 Deprecated. 
@@ -514,7 +521,7 @@ implements 
+
 long
 getMaxFileSize()
 Deprecated. 
@@ -522,40 +529,40 @@ implements 
+
 long
 getMemStoreFlushSize()
 Deprecated. 
 Returns the size of the memstore after which a flush to 
filesystem is triggered.
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 getNameAsString()
 Deprecated. 
 Get the name of the table as a String
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 getOwn

[20/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index 00fdac8..ee05e07 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -16,11 +16,11 @@
 008@InterfaceAudience.Private
 009public class Version {
 010  public static final String version = "3.0.0-SNAPSHOT";
-011  public static final String revision = "31da4d0bce69b3a47066a5df675756087ce4dc60";
+011  public static final String revision = "22f4def942f8a3367d0ca6598317e9b9a7d0cfcd";
 012  public static final String user = "jenkins";
-013  public static final String date = "Thu Mar 15 14:41:42 UTC 2018";
+013  public static final String date = "Fri Mar 16 14:41:20 UTC 2018";
 014  public static final String url = "git://asf920.gq1.ygridcore.net/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";
-015  public static final String srcChecksum = "19a96f7db58e59c468ba7211c146ebe4";
+015  public static final String srcChecksum = "574e2041b3e629f67dd934e64524deb2";
 016}
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
index 3445980..8425334 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
@@ -1348,12 +1348,12 @@
 1340    colBuilder.setTimeToLive(ttl);
 1341
 1342    ColumnFamilyDescriptor colSessionsDesc = colBuilder.build();
-1343    builder.addColumnFamily(colSessionsDesc);
+1343    builder.setColumnFamily(colSessionsDesc);
 1344
 1345    colBuilder =
 1346        ColumnFamilyDescriptorBuilder.newBuilder(META_FAMILY);
 1347    colBuilder.setTimeToLive(ttl);
-1348    builder.addColumnFamily(colBuilder.build());
+1348    builder.setColumnFamily(colBuilder.build());
 1349    return builder.build();
 1350  }
 1351
@@ -1388,11 +1388,11 @@
 1380        BackupRestoreConstants.BACKUP_SYSTEM_TTL_DEFAULT);
 1381    colBuilder.setTimeToLive(ttl);
 1382    ColumnFamilyDescriptor colSessionsDesc = colBuilder.build();
-1383    builder.addColumnFamily(colSessionsDesc);
+1383    builder.setColumnFamily(colSessionsDesc);
 1384    colBuilder =
 1385        ColumnFamilyDescriptorBuilder.newBuilder(META_FAMILY);
 1386    colBuilder.setTimeToLive(ttl);
-1387    builder.addColumnFamily(colBuilder.build());
+1387    builder.setColumnFamily(colBuilder.build());
 1388    return builder.build();
 1389  }
 1390

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
index 3445980..8425334 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
@@ -1348,12 +1348,12 @@
 1340    colBuilder.setTimeToLive(ttl);
 1341
 1342    ColumnFamilyDescriptor colSessionsDesc = colBuilder.build();
-1343    builder.addColumnFamily(colSessionsDesc);
+1343    builder.setColumnFamily(colSessionsDesc);
 1344
 1345    colBuilder =
 1346        ColumnFamilyDescriptorBuilder.newBuilder(META_FAMILY);
 1347    colBuilder.setTimeToLive(ttl);
-1348    builder.addColumnFamily(colBuilder.build());
+1348    builder.setColumnFamily(colBuilder.build());
 1349    return builder.build();
 1350  }
 1351
@@ -1388,11 +1388,11 @@
 1380        BackupRestoreConstants.BACKUP_SYSTEM_TTL_DEFAULT);
 1381    colBuilder.setTimeToLive(ttl);
 1382    ColumnFamilyDescriptor colSessionsDesc = colBuilder.build();
-1383    builder.addColumnFamily(colSessionsDesc);
+1383    builder.setColumnFamily(colSessionsDesc);
 1384    colBuilder =
 1385        ColumnFamilyDescriptorBuilder.newBuilder(META_FAMILY);
 1386    colBuilder.setTimeToLive(ttl);
-1387    builder.addColumnFamily(colBuilder.build());
+1387    builder.setColumnFamily(colBuilder.build());
 1388    return builder.build();
 1389  }
 1390

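Illustrative sketch only (not from the commit) of the pattern the BackupSystemTable hunks switch to, assuming the HBase 2.x builder API shown above: each family is built with ColumnFamilyDescriptorBuilder and handed to setColumnFamily(...), which replaces the removed addColumnFamily(...). The table name, family names, and TTL value below are made up.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class SetColumnFamilyExample {
  public static TableDescriptor backupLikeTable() {
    // Two families, each with its own TTL, accumulated via setColumnFamily(...).
    return TableDescriptorBuilder.newBuilder(TableName.valueOf("demo_backup"))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("session"))
            .setTimeToLive(3 * 24 * 3600)   // TTL in seconds
            .build())
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("meta"))
            .setTimeToLive(3 * 24 * 3600)
            .build())
        .build();
  }
}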
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/backup/util/RestoreTool.html
-

[04/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html b/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
index f7d6df6..84112c6 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/wal/package-tree.html
@@ -139,8 +139,8 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.wal.IOTestProvider.AllowedOperations
 org.apache.hadoop.hbase.wal.TestWALSplit.Corruptions
+org.apache.hadoop.hbase.wal.IOTestProvider.AllowedOperations
 org.apache.hadoop.hbase.wal.FaultyFSLog.FailureType
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/testdevapidocs/overview-tree.html
--
diff --git a/testdevapidocs/overview-tree.html b/testdevapidocs/overview-tree.html
index 368fe98..1f25bf6 100644
--- a/testdevapidocs/overview-tree.html
+++ b/testdevapidocs/overview-tree.html
@@ -2462,6 +2462,7 @@
 org.apache.hadoop.hbase.coprocessor.TestCoprocessorConfiguration
 org.apache.hadoop.hbase.coprocessor.TestCoprocessorConfiguration.SystemCoprocessor 
(implements org.apache.hadoop.hbase.coprocessor.MasterCoprocessor, 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor, 
org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor)
 org.apache.hadoop.hbase.coprocessor.TestCoprocessorConfiguration.TableCoprocessor 
(implements org.apache.hadoop.hbase.coprocessor.RegionCoprocessor)
+org.apache.hadoop.hbase.client.TestCoprocessorDescriptor
 org.apache.hadoop.hbase.coprocessor.TestCoprocessorEndpoint
 org.apache.hadoop.hbase.coprocessor.TestCoprocessorHost
 org.apache.hadoop.hbase.coprocessor.TestCoprocessorHost.TestAbortable (implements 
org.apache.hadoop.hbase.Abortable)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/testdevapidocs/src-html/org/apache/hadoop/hbase/AcidGuaranteesTestBase.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/AcidGuaranteesTestBase.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/AcidGuaranteesTestBase.html
index 6944c54..05638d9 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/AcidGuaranteesTestBase.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/AcidGuaranteesTestBase.html
@@ -84,7 +84,7 @@
 076      builder.setValue(CompactingMemStore.IN_MEMORY_FLUSH_THRESHOLD_FACTOR_KEY, "0.9");
 077    }
 078    Stream.of(FAMILIES).map(ColumnFamilyDescriptorBuilder::of)
-079        .forEachOrdered(builder::addColumnFamily);
+079        .forEachOrdered(builder::setColumnFamily);
 080    UTIL.getAdmin().createTable(builder.build());
 081    tool.setConf(UTIL.getConfiguration());
 082  }

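A self-contained sketch of the method-reference form used in the hunk above, with setColumnFamily as the sink. Assumptions not in the original: the FAMILIES array contents and that a connected Admin instance is passed in.

import java.io.IOException;
import java.util.stream.Stream;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class StreamFamiliesExample {
  static final byte[][] FAMILIES = { Bytes.toBytes("A"), Bytes.toBytes("B") };

  static void createTable(Admin admin, TableName name) throws IOException {
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(name);
    // ColumnFamilyDescriptorBuilder.of(byte[]) builds a family with default settings;
    // forEachOrdered feeds each descriptor into the builder via setColumnFamily(...).
    Stream.of(FAMILIES).map(ColumnFamilyDescriptorBuilder::of)
        .forEachOrdered(builder::setColumnFamily);
    admin.createTable(builder.build());
  }
}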
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/testdevapidocs/src-html/org/apache/hadoop/hbase/AcidGuaranteesTestTool.AtomicGetReader.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/AcidGuaranteesTestTool.AtomicGetReader.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/AcidGuaranteesTestTool.AtomicGetReader.html
index e7b37cc..f64dba2 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/AcidGuaranteesTestTool.AtomicGetReader.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/AcidGuaranteesTestTool.AtomicGetReader.html
@@ -328,7 +328,7 @@
 320    if (!admin.tableExists(TABLE_NAME)) {
 321      TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TABLE_NAME);
 322      Stream.of(FAMILIES).map(ColumnFamilyDescriptorBuilder::of)
-323          .forEachOrdered(builder::addColumnFamily);
+323          .forEachOrdered(builder::setColumnFamily);
 324      admin.createTable(builder.build());
 325    }
 326    ColumnFamilyDescriptor cfd = admin.getDescriptor(TABLE_NAME).getColumnFamilies()[0];

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/testdevapidocs/src-html/org/apache/hadoop/hbase/AcidGuaranteesTestTool.AtomicScanReader.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/AcidGuaranteesTestTool.AtomicScanReader.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/AcidGuaranteesTestTool.AtomicScanReader.html
index e7b37cc..f64dba2 100644
--- a/testdev

[40/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
index 05c0542..2d09bf8 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
@@ -35,1393 +35,1419 @@
 027import java.util.HashSet;
 028import java.util.List;
 029import java.util.Map;
-030import java.util.Set;
-031import java.util.TreeMap;
-032import java.util.TreeSet;
-033import java.util.function.Function;
-034import java.util.regex.Matcher;
-035import org.apache.hadoop.fs.Path;
-036import 
org.apache.hadoop.hbase.Coprocessor;
-037import 
org.apache.hadoop.hbase.HConstants;
-038import 
org.apache.hadoop.hbase.TableName;
-039import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-040import 
org.apache.hadoop.hbase.security.User;
-041import 
org.apache.hadoop.hbase.util.Bytes;
-042import 
org.apache.yetus.audience.InterfaceAudience;
-043import org.slf4j.Logger;
-044import org.slf4j.LoggerFactory;
-045
-046import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-047import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-048
-049/**
-050 * @since 2.0.0
-051 */
-052@InterfaceAudience.Public
-053public class TableDescriptorBuilder {
-054  public static final Logger LOG = 
LoggerFactory.getLogger(TableDescriptorBuilder.class);
-055  @InterfaceAudience.Private
-056  public static final String SPLIT_POLICY 
= "SPLIT_POLICY";
-057  private static final Bytes 
SPLIT_POLICY_KEY = new Bytes(Bytes.toBytes(SPLIT_POLICY));
-058  /**
-059   * Used by HBase Shell interface to 
access this metadata
-060   * attribute which denotes the maximum 
size of the store file after which a
-061   * region split occurs.
-062   */
-063  @InterfaceAudience.Private
-064  public static final String MAX_FILESIZE 
= "MAX_FILESIZE";
-065  private static final Bytes 
MAX_FILESIZE_KEY
-066  = new 
Bytes(Bytes.toBytes(MAX_FILESIZE));
-067
-068  @InterfaceAudience.Private
-069  public static final String OWNER = 
"OWNER";
+030import java.util.Objects;
+031import java.util.Optional;
+032import java.util.Set;
+033import java.util.TreeMap;
+034import java.util.TreeSet;
+035import java.util.function.Function;
+036import java.util.regex.Matcher;
+037import java.util.regex.Pattern;
+038import 
org.apache.hadoop.hbase.Coprocessor;
+039import 
org.apache.hadoop.hbase.HConstants;
+040import 
org.apache.hadoop.hbase.TableName;
+041import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+042import 
org.apache.hadoop.hbase.security.User;
+043import 
org.apache.hadoop.hbase.util.Bytes;
+044import 
org.apache.yetus.audience.InterfaceAudience;
+045import org.slf4j.Logger;
+046import org.slf4j.LoggerFactory;
+047
+048import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+049import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
+050
+051/**
+052 * @since 2.0.0
+053 */
+054@InterfaceAudience.Public
+055public class TableDescriptorBuilder {
+056  public static final Logger LOG = 
LoggerFactory.getLogger(TableDescriptorBuilder.class);
+057  @InterfaceAudience.Private
+058  public static final String SPLIT_POLICY 
= "SPLIT_POLICY";
+059  private static final Bytes 
SPLIT_POLICY_KEY = new Bytes(Bytes.toBytes(SPLIT_POLICY));
+060  /**
+061   * Used by HBase Shell interface to 
access this metadata
+062   * attribute which denotes the maximum 
size of the store file after which a
+063   * region split occurs.
+064   */
+065  @InterfaceAudience.Private
+066  public static final String MAX_FILESIZE 
= "MAX_FILESIZE";
+067  private static final Bytes 
MAX_FILESIZE_KEY
+068  = new 
Bytes(Bytes.toBytes(MAX_FILESIZE));
+069
 070  @InterfaceAudience.Private
-071  public static final Bytes OWNER_KEY
-072  = new 
Bytes(Bytes.toBytes(OWNER));
-073
-074  /**
-075   * Used by rest interface to access 
this metadata attribute
-076   * which denotes if the table is Read 
Only.
-077   */
-078  @InterfaceAudience.Private
-079  public static final String READONLY = 
"READONLY";
-080  private static final Bytes 
READONLY_KEY
-081  = new 
Bytes(Bytes.toBytes(READONLY));
-082
-083  /**
-084   * Used by HBase Shell interface to 
access this metadata
-085   * attribute which denotes if the table 
is compaction enabled.
-086   */
-087  @InterfaceAudience.Private
-088  public static final String 
COMPACTION_ENABLED = "COMPACTION_ENABLED";
-089  private static final Bytes 
COMPACTION_ENABLED_KEY
-090  = new 
Bytes(Bytes.toBytes(COMPACTION_ENABLED));
-091
-092  /**
-093   * Used by HBase Shell interface to 
access this metadata
-094   * attribute which represents the 
maximum size of the mems

[45/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html b/apidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
index 81c7c35..6edb70c 100644
--- a/apidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
+++ b/apidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":9,"i6":9,"i7":10,"i8":9,"i9":9,"i10":9,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":42,"i22":42,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":9};
+var methods = 
{"i0":10,"i1":9,"i2":9,"i3":10,"i4":9,"i5":9,"i6":9,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":42,"i23":42,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class TableDescriptorBuilder
+public class TableDescriptorBuilder
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 
 Since:
@@ -135,43 +135,47 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 Field and Description
 
 
+static https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern
+CP_HTD_ATTR_KEY_PATTERN 
+
+
 static boolean
 DEFAULT_COMPACTION_ENABLED
 Constant that denotes whether the table is compaction 
enabled by default
 
 
-
+
 static long
 DEFAULT_MEMSTORE_FLUSH_SIZE
 Constant that denotes the maximum default size of the 
memstore after which
  the contents are flushed to the store files
 
 
-
+
 static boolean
 DEFAULT_NORMALIZATION_ENABLED
 Constant that denotes whether the table is normalized by 
default.
 
 
-
+
 static boolean
 DEFAULT_READONLY
 Constant that denotes whether the table is READONLY by 
default and is false
 
 
-
+
 static boolean
 DEFAULT_REGION_MEMSTORE_REPLICATION 
 
-
+
 static int
 DEFAULT_REGION_REPLICATION 
 
-
+
 static org.slf4j.Logger
 LOG 
 
-
+
 static TableDescriptor
 NAMESPACE_TABLEDESC
 Table descriptor for namespace table
@@ -193,151 +197,152 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 Method and Description
 
 
-TableDescriptorBuilder
-addColumnFamily(ColumnFamilyDescriptor family) 
-
-
-TableDescriptorBuilder
-addCoprocessor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String className) 
-
-
-TableDescriptorBuilder
-addCoprocessor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String className,
-  org.apache.hadoop.fs.Path jarFilePath,
-  int priority,
-  https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String> kvs) 
-
-
-TableDescriptorBuilder
-addCoprocessorWithSpec(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String specStr) 
-
-
 TableDescriptor
 build() 
 
-
+
 static TableDescriptor
 copy(TableDescriptor desc) 
 
-
+
 static TableDescriptor
 copy(TableName name,
 TableDescriptor desc) 
 
-
+
 TableDescriptorBuilder
 modifyColumnFamily(ColumnFamilyDescriptor family) 
 
-
+
 static TableDescriptorBuilder
 newBuilder(TableDescriptor desc)
 Copy all values, families, and name from the input.
 
 
-
+
 static TableDescriptorBuilder
 newBuilder(TableName name) 
 
-
+
 static TableDescriptor
 parseFrom(byte[] pbBytes)
 The input should be created by toByteArray(org.apache.hadoop.hbase.client.TableDescriptor).
 
 
-
+
 TableDescriptorBuilder
 removeColumnFamily(byte[] name) 
 
-
+
 TableDescriptorBuilder
 removeCoprocessor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String className) 
 
-
+
 TableDescriptorBuilder
 removeValue(byte[] key) 
 
-
+
 TableDescriptorBuilder
 removeValue(B

[32/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
index 8d19f36..0c03788 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/ImmutableHTableDescriptor.html
@@ -207,7 +207,7 @@ extends 
 
 Methods inherited from class org.apache.hadoop.hbase.HTableDescriptor
-addCoprocessor, addCoprocessor, addCoprocessorWithSpec, addFamily, compareTo, equals, getColumnFamilies, getColumnFamily, getColumnFamilyCount, getColumnFamilyNames, getConfiguration, getConfigurationValue, getCoprocessors, getDurability, getFamilies, getFamiliesKeys, getFamily, getFlushPolicyClassName, getMaxFileSize, getMemStoreFlushSize, getNameAsString, getOwnerString, getPriority, getRegionReplication, getRegionSplitPolicyClassName, getTableName, getValue, getValue, getValue, getValues, hasColumnFamily, hasCoprocessor, hasFamily, hashCode, hasRegionMemstoreReplication, hasRegionMemStoreReplication, isCompactionEnabled, isMetaRegion, isMetaTable, isNormalizationEnabled, isReadOnly, isRootRegion, modifyFamily, parseFrom, remove, remove, remove, removeConfiguration, removeCoprocessor, removeFamily, setCompactionEnabled, setConfiguration, setDurability, setFlushPolicyClassName, setMaxFileSize, setMemStoreFlushSize, setNormalizationEnabled, setOwner, setOwnerString, setPriority, setReadOnly, setRegionMemstoreReplication, setRegionMemStoreReplication, setRegionReplication, setRegionSplitPolicyClassName, setValue, setValue, setValue, toByteArray, toString, toStringCustomizedValues, toStringTableAttributes
+addCoprocessor, addCoprocessor, addCoprocessorWithSpec, addFamily, compareTo, equals, getColumnFamilies, getColumnFamily, getColumnFamilyCount, getColumnFamilyNames, getConfiguration, getConfigurationValue, getCoprocessorDescriptors, getCoprocessors, getDurability, getFamilies, getFamiliesKeys, getFamily, getFlushPolicyClassName, getMaxFileSize, getMemStoreFlushSize, getNameAsString, getOwnerString, getPriority, getRegionReplication, getRegionSplitPolicyClassName, getTableName, getValue, getValue, getValue, getValues, hasColumnFamily, hasCoprocessor, hasFamily, hashCode, hasRegionMemstoreReplication, hasRegionMemStoreReplication, isCompactionEnabled, isMetaRegion, isMetaTable, isNormalizationEnabled, isReadOnly, isRootRegion, modifyFamily, parseFrom, remove, remove, remove, removeConfiguration, removeCoprocessor, removeFamily, setCompactionEnabled, setConfiguration, setDurability, setFlushPolicyClassName, setMaxFileSize, setMemStoreFlushSize, setNormalizationEnabled, setOwner, setOwnerString, setPriority, setReadOnly, setRegionMemstoreReplication, setRegionMemStoreReplication, setRegionReplication, setRegionSplitPolicyClassName, setValue, setValue, setValue, toByteArray, toString, toStringCustomizedValues, toStringTableAttributes
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/client/TableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptor.html b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptor.html
index acdc691..373c742 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptor.html
@@ -183,10 +183,9 @@ public interface getComparator(https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true";
 title="class or interface in java.util">Comparator cfComparator) 
 
 
-https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">CollectionString>
-getCoprocessors()
-Return the list of attached co-processor represented by 
their name
- className
+https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
+getCoprocessorDescriptors()
+Return the list of attached co-processor represented
 
 
 
@@ -391,18 +390,17 @@ static final https://docs.oracle.com/javase/8/docs/api/java/util/C
 
 
 
-
+
 
 
 
 
-getCoprocessors
-https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection

[41/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.
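Read-side sketch (not part of the commit) of the replacement noted above: getCoprocessorDescriptors() instead of the old getCoprocessors(). It assumes the TableDescriptor comes from elsewhere (for example Admin#getDescriptor) and that getJarPath() returns an Optional, as the new interface shows.

import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class ListCoprocessorsExample {
  static void dump(TableDescriptor td) {
    for (CoprocessorDescriptor cp : td.getCoprocessorDescriptors()) {
      // Each descriptor carries the class name, an optional jar path, a priority,
      // and arbitrary key-value properties.
      System.out.println(cp.getClassName() + " priority=" + cp.getPriority()
          + " jar=" + cp.getJarPath().orElse("<classpath>"));
    }
  }
}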

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apidocs/src-html/org/apache/hadoop/hbase/client/CoprocessorDescriptor.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/CoprocessorDescriptor.html b/apidocs/src-html/org/apache/hadoop/hbase/client/CoprocessorDescriptor.html
new file mode 100644
index 000..1064470
--- /dev/null
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/CoprocessorDescriptor.html
@@ -0,0 +1,123 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+Source code
+
+
+
+
+001/**
+002 *
+003 * Licensed to the Apache Software Foundation (ASF) under one
+004 * or more contributor license agreements.  See the NOTICE file
+005 * distributed with this work for additional information
+006 * regarding copyright ownership.  The ASF licenses this file
+007 * to you under the Apache License, Version 2.0 (the
+008 * "License"); you may not use this file except in compliance
+009 * with the License.  You may obtain a copy of the License at
+010 *
+011 *     http://www.apache.org/licenses/LICENSE-2.0
+012 *
+013 * Unless required by applicable law or agreed to in writing, software
+014 * distributed under the License is distributed on an "AS IS" BASIS,
+015 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+016 * See the License for the specific language governing permissions and
+017 * limitations under the License.
+018 */
+019package org.apache.hadoop.hbase.client;
+020
+021import java.util.Map;
+022import java.util.Optional;
+023import org.apache.yetus.audience.InterfaceAudience;
+024
+025/**
+026 * CoprocessorDescriptor contains the details about how to build a coprocessor.
+027 * This class is a pojo so there are no checks for the details carried by this class.
+028 * Use {@link CoprocessorDescriptorBuilder} to instantiate a CoprocessorDescriptor
+029 */
+030@InterfaceAudience.Public
+031public interface CoprocessorDescriptor {
+032  /**
+033   * @return the name of the class or interface represented by this object.
+034   */
+035  String getClassName();
+036
+037  /**
+038   * @return Path of the jar file. If it's null, the class will be loaded from default classloader.
+039   */
+040  Optional<String> getJarPath();
+041
+042  /**
+043   * @return The order to execute this coprocessor
+044   */
+045  int getPriority();
+046
+047  /**
+048   * @return Arbitrary key-value parameter pairs passed into the coprocessor.
+049   */
+050  Map<String, String> getProperties();
+051}

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apidocs/src-html/org/apache/hadoop/hbase/client/CoprocessorDescriptorBuilder.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/CoprocessorDescriptorBuilder.html b/apidocs/src-html/org/apache/hadoop/hbase/client/CoprocessorDescriptorBuilder.html
new file mode 100644
index 000..ef0d80f
--- /dev/null
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/CoprocessorDescriptorBuilder.html
@@ -0,0 +1,190 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+Source code
+
+
+
+
+001/**
+002 *
+003 * Licensed to the Apache Software Foundation (ASF) under one
+004 * or more contributor license agreements.  See the NOTICE file
+005 * distributed with this work for additional information
+006 * regarding copyright ownership.  The ASF licenses this file
+007 * to you under the Apache License, Version 2.0 (the
+008 * "License"); you may not use this file except in compliance
+009 * with the License.  You may obtain a copy of the License at
+010 *
+011 *     http://www.apache.org/licenses/LICENSE-2.0
+012 *
+013 * Unless required by applicable law or agreed to in writing, software
+014 * distributed under the License is distributed on an "AS IS" BASIS,
+015 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+016 * See the License for the specific language governing permissions and
+017 * limitations under the License.
+018 */
+019package org.apache.hadoop.hbase.client;
+020
+021import java.util.Collections;
+022import java.util.Map;
+023import java.util.Objects;
+024import java.util.Optional;
+025import java.util.TreeMap;
+026import org.apache.hadoop.hbase.Coprocessor;
+027import org.apache.yetus.audience.InterfaceAudience;
+028
+029/**
+030 * Used to build the {@link CoprocessorDescriptor}
+031 */
+032@InterfaceAudience.Public
+033public final class CoprocessorDescriptorBuilder {
+034
+035  public static CoprocessorDescriptor of(String className) {
+036    return new CoprocessorDescriptorBuilder(className).build();
+037  }
+038
+039  public static CoprocessorDescriptorBuilder newBuilder(String className) {
+040    return new CoprocessorDescriptorBuilder(className);
+04

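Speculative sketch assembled from the sources shown above (not from the commit): constructing a CoprocessorDescriptor with the builder and attaching it via TableDescriptorBuilder#setCoprocessor(CoprocessorDescriptor). The observer class name, jar path, priority, and property values are placeholders; the setter names are assumed from the HBase 2.x client API.

import java.io.IOException;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class CoprocessorDescriptorExample {
  static TableDescriptor withObserver() throws IOException {
    CoprocessorDescriptor cp = CoprocessorDescriptorBuilder
        .newBuilder("com.example.FooRegionObserver") // class name is mandatory
        .setJarPath("hdfs:///cp/foo.jar")            // optional: load from this jar
        .setPriority(Coprocessor.PRIORITY_USER)
        .setProperty("arg1", "1")
        .build();
    // The descriptor replaces the old pipe-delimited addCoprocessorWithSpec(...) string.
    return TableDescriptorBuilder.newBuilder(TableName.valueOf("demo"))
        .setCoprocessor(cp)
        .build();
  }
}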
[21/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html b/devapidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
index cbbd63f..19efd1a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
@@ -28,873 +28,886 @@
 020
 021import java.io.IOException;
 022import java.util.Collection;
-023import java.util.List;
-024import java.util.Map;
-025import java.util.Set;
-026import java.util.stream.Collectors;
-027import java.util.stream.Stream;
-028import org.apache.hadoop.fs.Path;
-029import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-030import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor;
-031import 
org.apache.hadoop.hbase.client.Durability;
-032import 
org.apache.hadoop.hbase.client.TableDescriptor;
-033import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-034import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor;
-035import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-036import 
org.apache.hadoop.hbase.security.User;
-037import 
org.apache.hadoop.hbase.util.Bytes;
-038import 
org.apache.yetus.audience.InterfaceAudience;
-039
-040/**
-041 * HTableDescriptor contains the details 
about an HBase table  such as the descriptors of
-042 * all the column families, is the table 
a catalog table,  hbase:meta ,
-043 * if the table is read only, the maximum 
size of the memstore,
-044 * when the region split should occur, 
coprocessors associated with it etc...
-045 * @deprecated As of release 2.0.0, this 
will be removed in HBase 3.0.0.
-046 * Use {@link 
TableDescriptorBuilder} to build {@link HTableDescriptor}.
-047 */
-048@Deprecated
-049@InterfaceAudience.Public
-050public class HTableDescriptor implements 
TableDescriptor, Comparable {
-051  public static final String SPLIT_POLICY 
= TableDescriptorBuilder.SPLIT_POLICY;
-052  public static final String MAX_FILESIZE 
= TableDescriptorBuilder.MAX_FILESIZE;
-053  public static final String OWNER = 
TableDescriptorBuilder.OWNER;
-054  public static final Bytes OWNER_KEY = 
TableDescriptorBuilder.OWNER_KEY;
-055  public static final String READONLY = 
TableDescriptorBuilder.READONLY;
-056  public static final String 
COMPACTION_ENABLED = TableDescriptorBuilder.COMPACTION_ENABLED;
-057  public static final String 
MEMSTORE_FLUSHSIZE = TableDescriptorBuilder.MEMSTORE_FLUSHSIZE;
-058  public static final String FLUSH_POLICY 
= TableDescriptorBuilder.FLUSH_POLICY;
-059  public static final String IS_ROOT = 
"IS_ROOT";
-060  public static final String IS_META = 
TableDescriptorBuilder.IS_META;
-061  public static final String DURABILITY = 
TableDescriptorBuilder.DURABILITY;
-062  public static final String 
REGION_REPLICATION = TableDescriptorBuilder.REGION_REPLICATION;
-063  public static final String 
REGION_MEMSTORE_REPLICATION = 
TableDescriptorBuilder.REGION_MEMSTORE_REPLICATION;
-064  public static final String 
NORMALIZATION_ENABLED = TableDescriptorBuilder.NORMALIZATION_ENABLED;
-065  public static final String PRIORITY = 
TableDescriptorBuilder.PRIORITY;
-066  public static final boolean 
DEFAULT_READONLY = TableDescriptorBuilder.DEFAULT_READONLY;
-067  public static final boolean 
DEFAULT_COMPACTION_ENABLED = 
TableDescriptorBuilder.DEFAULT_COMPACTION_ENABLED;
-068  public static final boolean 
DEFAULT_NORMALIZATION_ENABLED = 
TableDescriptorBuilder.DEFAULT_NORMALIZATION_ENABLED;
-069  public static final long 
DEFAULT_MEMSTORE_FLUSH_SIZE = 
TableDescriptorBuilder.DEFAULT_MEMSTORE_FLUSH_SIZE;
-070  public static final int 
DEFAULT_REGION_REPLICATION = 
TableDescriptorBuilder.DEFAULT_REGION_REPLICATION;
-071  public static final boolean 
DEFAULT_REGION_MEMSTORE_REPLICATION = 
TableDescriptorBuilder.DEFAULT_REGION_MEMSTORE_REPLICATION;
-072  protected final 
ModifyableTableDescriptor delegatee;
-073
-074  /**
-075   * Construct a table descriptor 
specifying a TableName object
-076   * @param name Table name.
-077   * @see HADOOP-1581 HBASE: 
(HBASE-174) Un-openable tablename bug
-078   */
-079  public HTableDescriptor(final TableName 
name) {
-080this(new 
ModifyableTableDescriptor(name));
-081  }
-082
-083  /**
-084   * Construct a table descriptor by cloning the descriptor passed as a parameter.
-085   *
-086   * Makes a deep copy of the supplied descriptor.
-087   * Can make a modifiable descriptor from an ImmutableHTableDescriptor.
-088   * @param desc The descriptor.
-089   */
-090  public HTableDescriptor(final HTableDescriptor desc) {
-091    this(desc, true);
-092  }
-093
-094  protected HTableDes


[16/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.BulkLoadObserverOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.BulkLoadObserverOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.BulkLoadObserverOperation.html
index b99f924..2bb6cea 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.BulkLoadObserverOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.BulkLoadObserverOperation.html
@@ -37,1779 +37,1734 @@
 029import java.util.UUID;
 030import 
java.util.concurrent.ConcurrentHashMap;
 031import 
java.util.concurrent.ConcurrentMap;
-032import java.util.regex.Matcher;
-033
-034import 
org.apache.commons.collections4.map.AbstractReferenceMap;
-035import 
org.apache.commons.collections4.map.ReferenceMap;
-036import 
org.apache.hadoop.conf.Configuration;
-037import org.apache.hadoop.fs.FileSystem;
-038import org.apache.hadoop.fs.Path;
-039import org.apache.hadoop.hbase.Cell;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.Coprocessor;
-042import 
org.apache.hadoop.hbase.HBaseConfiguration;
-043import 
org.apache.hadoop.hbase.HConstants;
-044import 
org.apache.hadoop.hbase.RawCellBuilder;
-045import 
org.apache.hadoop.hbase.RawCellBuilderFactory;
-046import 
org.apache.hadoop.hbase.ServerName;
-047import 
org.apache.hadoop.hbase.SharedConnection;
-048import 
org.apache.hadoop.hbase.client.Append;
-049import 
org.apache.hadoop.hbase.client.Connection;
-050import 
org.apache.hadoop.hbase.client.Delete;
-051import 
org.apache.hadoop.hbase.client.Durability;
-052import 
org.apache.hadoop.hbase.client.Get;
-053import 
org.apache.hadoop.hbase.client.Increment;
-054import 
org.apache.hadoop.hbase.client.Mutation;
-055import 
org.apache.hadoop.hbase.client.Put;
-056import 
org.apache.hadoop.hbase.client.RegionInfo;
-057import 
org.apache.hadoop.hbase.client.Result;
-058import 
org.apache.hadoop.hbase.client.Scan;
-059import 
org.apache.hadoop.hbase.client.TableDescriptor;
-060import 
org.apache.hadoop.hbase.coprocessor.BaseEnvironment;
-061import 
org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
-062import 
org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-063import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-064import 
org.apache.hadoop.hbase.coprocessor.CoprocessorService;
-065import 
org.apache.hadoop.hbase.coprocessor.CoprocessorServiceBackwardCompatiblity;
-066import 
org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
-067import 
org.apache.hadoop.hbase.coprocessor.EndpointObserver;
-068import 
org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
-069import 
org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
-070import 
org.apache.hadoop.hbase.coprocessor.ObserverContext;
-071import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-072import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-073import 
org.apache.hadoop.hbase.coprocessor.RegionObserver;
-074import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-075import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-076import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-077import 
org.apache.hadoop.hbase.io.Reference;
-078import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-079import 
org.apache.hadoop.hbase.metrics.MetricRegistry;
-080import 
org.apache.hadoop.hbase.regionserver.Region.Operation;
-081import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-082import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-083import 
org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
-084import 
org.apache.hadoop.hbase.security.User;
-085import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.CoprocessorClassLoader;
-088import 
org.apache.hadoop.hbase.util.Pair;
-089import 
org.apache.hadoop.hbase.wal.WALEdit;
-090import 
org.apache.hadoop.hbase.wal.WALKey;
-091import 
org.apache.yetus.audience.InterfaceAudience;
-092import org.slf4j.Logger;
-093import org.slf4j.LoggerFactory;
-094
-095/**
-096 * Implements the coprocessor environment 
and runtime support for coprocessors
-097 * loaded within a {@link Region}.
-098 */
-099@InterfaceAudience.Private
-100public class RegionCoprocessorHost
-101extends 
CoprocessorHost {
-102
-103  private static final Logger LOG = 
LoggerFactory.getLogger(RegionCoprocessorHost.class);
-104  // The shared data map
-105  private static final 
ReferenceMap> SHARED_DATA_MAP 
=
-106  new 
Referen

[02/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.html
index 939c65a..3261918 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.html
@@ -78,7 +78,7 @@
 070  public void testAddColumnFamily() 
throws Exception {
 071// Create a table with two families
 072TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(tableName);
-073
builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0));
+073
builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0));
 074
admin.createTable(builder.build()).join();
 075
admin.disableTable(tableName).join();
 076// Verify the table descriptor
@@ -93,7 +93,7 @@
 085  public void 
testAddSameColumnFamilyTwice() throws Exception {
 086// Create a table with one families
 087TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(tableName);
-088
builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0));
+088
builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0));
 089
admin.createTable(builder.build()).join();
 090
admin.disableTable(tableName).join();
 091// Verify the table descriptor
@@ -117,7 +117,7 @@
 109TableDescriptorBuilder tdBuilder = 
TableDescriptorBuilder.newBuilder(tableName);
 110ColumnFamilyDescriptor cfd = 
ColumnFamilyDescriptorBuilder.of(FAMILY_0);
 111int blockSize = cfd.getBlocksize();
-112
admin.createTable(tdBuilder.addColumnFamily(cfd).build()).join();
+112
admin.createTable(tdBuilder.setColumnFamily(cfd).build()).join();
 113
admin.disableTable(tableName).join();
 114// Verify the table descriptor
 115verifyTableDescriptor(tableName, 
FAMILY_0);
@@ -137,7 +137,7 @@
 129TableDescriptorBuilder tdBuilder = 
TableDescriptorBuilder.newBuilder(tableName);
 130ColumnFamilyDescriptor cfd = 
ColumnFamilyDescriptorBuilder.of(FAMILY_0);
 131int blockSize = cfd.getBlocksize();
-132
admin.createTable(tdBuilder.addColumnFamily(cfd).build()).join();
+132
admin.createTable(tdBuilder.setColumnFamily(cfd).build()).join();
 133
admin.disableTable(tableName).join();
 134// Verify the table descriptor
 135verifyTableDescriptor(tableName, 
FAMILY_0);
@@ -158,8 +158,8 @@
 150  public void testDeleteColumnFamily() 
throws Exception {
 151// Create a table with two families
 152TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(tableName);
-153
builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0))
-154
.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_1));
+153
builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0))
+154
.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_1));
 155
admin.createTable(builder.build()).join();
 156
admin.disableTable(tableName).join();
 157// Verify the table descriptor
@@ -174,8 +174,8 @@
 166  public void 
testDeleteSameColumnFamilyTwice() throws Exception {
 167// Create a table with two families
 168TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(tableName);
-169
builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0))
-170
.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_1));
+169
builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_0))
+170
.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY_1));
 171
admin.createTable(builder.build()).join();
 172
admin.disableTable(tableName).join();
 173// Verify the table descriptor
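
The hunks above track the HBase 2.0 rename of TableDescriptorBuilder.addColumnFamily to setColumnFamily. A minimal sketch of the renamed builder calls follows; the table and family names are hypothetical, and the snippet only builds a descriptor without touching a cluster:

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    import org.apache.hadoop.hbase.util.Bytes;

    public class SetColumnFamilyExample {
      public static void main(String[] args) {
        // Two families declared through the renamed setColumnFamily call;
        // the older addColumnFamily spelling is what the "-" lines above used.
        TableDescriptor desc = TableDescriptorBuilder
            .newBuilder(TableName.valueOf("example_table"))  // hypothetical name
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("cf0")))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("cf1")))
            .build();
        System.out.println(desc.getColumnFamilyCount() + " families in " + desc.getTableName());
      }
    }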

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.html
index 04d5471..dfa687f 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.html
@@ -128,7 +128,7 @@
 120byte[][] families = { FAMILY, 
FAMILY_0, FAMILY_1 };
 121TableDescriptorBuilder builder = 
TableDescriptorBuilder.newBuilder(tableName);
 122for (byte[] family : families) {
-123  
builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
+123  
builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
 124

[07/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html 
b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
index 3a91ea6..670bcf3 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/class-use/HBaseClassTestRule.html
@@ -975,630 +975,634 @@
 
 
 static HBaseClassTestRule
-TestImmutableHRegionInfo.CLASS_RULE 
+TestCoprocessorDescriptor.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestMutation.CLASS_RULE 
+TestImmutableHRegionInfo.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestRowComparator.CLASS_RULE 
+TestMutation.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestBufferedMutator.CLASS_RULE 
+TestRowComparator.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestClientScanner.CLASS_RULE 
+TestBufferedMutator.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestInterfaceAlign.CLASS_RULE 
+TestClientScanner.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestBufferedMutatorParams.CLASS_RULE 
+TestInterfaceAlign.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestSnapshotFromAdmin.CLASS_RULE 
+TestBufferedMutatorParams.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestIncrement.CLASS_RULE 
+TestSnapshotFromAdmin.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestAsyncProcessWithRegionException.CLASS_RULE 
+TestIncrement.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestHTableMultiplexerViaMocks.CLASS_RULE 
+TestAsyncProcessWithRegionException.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestColumnFamilyDescriptorBuilder.CLASS_RULE 
+TestHTableMultiplexerViaMocks.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestMetricsConnection.CLASS_RULE 
+TestColumnFamilyDescriptorBuilder.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestTableDescriptorBuilder.CLASS_RULE 
+TestMetricsConnection.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestImmutableHColumnDescriptor.CLASS_RULE 
+TestTableDescriptorBuilder.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestImmutableHTableDescriptor.CLASS_RULE 
+TestImmutableHColumnDescriptor.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestRetriesExhaustedWithDetailsException.CLASS_RULE 
+TestImmutableHTableDescriptor.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestAsyncProcess.CLASS_RULE 
+TestRetriesExhaustedWithDetailsException.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestProcedureFuture.CLASS_RULE 
+TestAsyncProcess.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestClientExponentialBackoff.CLASS_RULE 
+TestProcedureFuture.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestDeleteTimeStamp.CLASS_RULE 
+TestClientExponentialBackoff.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestOperation.CLASS_RULE 
+TestDeleteTimeStamp.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestMultiParallel.CLASS_RULE 
+TestOperation.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestSnapshotMetadata.CLASS_RULE 
+TestMultiParallel.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestFromClientSideNoCodec.CLASS_RULE 
+TestSnapshotMetadata.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestAsyncBufferMutator.CLASS_RULE 
+TestFromClientSideNoCodec.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestSizeFailures.CLASS_RULE 
+TestAsyncBufferMutator.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestGetProcedureResult.CLASS_RULE 
+TestSizeFailures.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestAsyncTableScanRenewLease.CLASS_RULE 
+TestGetProcedureResult.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestClientOperationInterrupt.CLASS_RULE 
+TestAsyncTableScanRenewLease.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestScanWithoutFetchingData.CLASS_RULE 
+TestClientOperationInterrupt.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestServerLoadDurability.CLASS_RULE 
+TestScanWithoutFetchingData.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestAsyncTableScan.CLASS_RULE 
+TestServerLoadDurability.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestMalformedCellFromClient.CLASS_RULE 
+TestAsyncTableScan.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestAsyncReplicationAdminApiWithClusters.CLASS_RULE 
+TestMalformedCellFromClient.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestCheckAndMutate.CLASS_RULE 
+TestAsyncReplicationAdminApiWithClusters.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestFastFail.CLASS_RULE 
+TestCheckAndMutate.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestAppendFromClientSide.CLASS_RULE 
+TestFastFail.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestServerBusyException.CLASS_RULE 
+TestAppendFromClientSide.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestMobSnapshotFromClient.CLASS_RULE 
+TestServerBusyException.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestAsyncQuotaAdminApi.CLASS_RULE 
+TestMobSnapshotFromClient.CLASS_RULE 
 
 
 static HBaseClassTestRule
-TestSplitOrMergeStatus.CLASS_RULE 
+TestAsyncQuotaAdminApi.CLASS_RULE 
 
 
 st
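
The reshuffled rows above are simply the regenerated "use" index for HBaseClassTestRule after TestCoprocessorDescriptor was added; every entry is the same one-line CLASS_RULE declaration. A minimal sketch of that convention, assuming JUnit 4 and the HBase test categories; the test class name here is hypothetical:

    import org.apache.hadoop.hbase.HBaseClassTestRule;
    import org.apache.hadoop.hbase.testclassification.ClientTests;
    import org.apache.hadoop.hbase.testclassification.SmallTests;
    import org.junit.ClassRule;
    import org.junit.Test;
    import org.junit.experimental.categories.Category;

    @Category({ SmallTests.class, ClientTests.class })
    public class TestSomethingSmall {                      // hypothetical test class
      // Class-level rule; it enforces the timeout that goes with the declared category.
      @ClassRule
      public static final HBaseClassTestRule CLASS_RULE =
          HBaseClassTestRule.forClass(TestSomethingSmall.class);

      @Test
      public void testNothingInParticular() {
        // Real tests exercise the class under test here.
      }
    }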

[35/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HConstants.html 
b/devapidocs/org/apache/hadoop/hbase/HConstants.html
index feefcbf..246c31a 100644
--- a/devapidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/org/apache/hadoop/hbase/HConstants.html
@@ -274,25 +274,42 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern
-CP_HTD_ATTR_KEY_PATTERN 
+CP_HTD_ATTR_KEY_PATTERN
+Deprecated. 
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN 
+CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
+Deprecated. 
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern
-CP_HTD_ATTR_VALUE_PARAM_PATTERN 
+CP_HTD_ATTR_VALUE_PARAM_PATTERN
+Deprecated. 
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN 
+CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
+Deprecated. 
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_VALUE_PATTERN
-
- Pattern that matches a coprocessor specification.
+Deprecated. 
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
 
 
 
@@ -4772,7 +4789,9 @@ public static final https://docs.oracle.com/javase/8/docs/api/java
 
 
 CP_HTD_ATTR_KEY_PATTERN
-public static final https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern CP_HTD_ATTR_KEY_PATTERN
+https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
+public static final https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern CP_HTD_ATTR_KEY_PATTERN
+Deprecated. It is used internally. As of release 2.0.0, this 
will be removed in HBase 3.0.0.
 
 
 
@@ -4781,7 +4800,9 @@ public static final https://docs.oracle.com/javase/8/docs/api/java
 
 
 CP_HTD_ATTR_VALUE_PATTERN
-public static final https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern CP_HTD_ATTR_VALUE_PATTERN
+https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
+public static final https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern CP_HTD_ATTR_VALUE_PATTERN
+Deprecated. It is used internally. As of release 2.0.0, this 
will be removed in HBase 3.0.0.
 
  Pattern that matches a coprocessor specification. Form is:
    '|'  ['|' 
 ['|' ]]
@@ -4796,7 +4817,9 @@ public static final https://docs.oracle.com/javase/8/docs/api/java
 
 
 CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
-public static final https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
+https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
+public static final https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
+Deprecated. It is used internally. As of release 2.0.0, this 
will be removed in HBase 3.0.0.
 
 See Also:
 Constant
 Field Values
@@ -4809,7 +4832,9 @@ public static final https://docs.oracle.com/javase/8/docs/api/java
 
 
 CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
-public static final https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
+https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or i
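
The constants being deprecated above describe the old string form of a table coprocessor attribute. Purely for illustration, a self-contained sketch that applies the same regular expression quoted in the HConstants source hunk later in this thread to a spec string of that form; nothing here depends on HBase itself:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class CoprocessorSpecMatchExample {
      // Literal of CP_HTD_ATTR_VALUE_PATTERN as it appears in the HConstants source diff.
      private static final Pattern CP_SPEC =
          Pattern.compile("(^[^\\|]*)\\|([^\\|]+)\\|[\\s]*([\\d]*)[\\s]*(\\|.*)?$");

      public static void main(String[] args) {
        String spec = "hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2";
        Matcher m = CP_SPEC.matcher(spec);
        if (m.matches()) {
          System.out.println("jar path : " + m.group(1)); // hdfs:///foo.jar
          System.out.println("class    : " + m.group(2)); // com.foo.FooRegionObserver
          System.out.println("priority : " + m.group(3)); // 1001
          System.out.println("arguments: " + m.group(4)); // |arg1=1,arg2=2 (pipe included)
        }
      }
    }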

[03/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.html
index 13d376b..249cd71 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.html
@@ -55,389 +55,388 @@
 047import 
org.apache.hadoop.hbase.util.JVMClusterUtil;
 048import 
org.apache.hadoop.hbase.util.Threads;
 049import org.junit.ClassRule;
-050import org.junit.Ignore;
-051import org.junit.Test;
-052import 
org.junit.experimental.categories.Category;
-053import org.junit.runner.RunWith;
-054import org.junit.runners.Parameterized;
-055
-056/**
-057 * Class to test asynchronous region 
admin operations.
-058 * @see TestAsyncRegionAdminApi2 This test and that one used to be a single class; the combined run took longer than our
-059 * ten-minute timeout, so they were split.
-060 */
-061@RunWith(Parameterized.class)
-062@Category({ LargeTests.class, 
ClientTests.class })
-063public class TestAsyncRegionAdminApi 
extends TestAsyncAdminBase {
-064  @ClassRule
-065  public static final HBaseClassTestRule 
CLASS_RULE =
-066  
HBaseClassTestRule.forClass(TestAsyncRegionAdminApi.class);
-067
-068  @Test
-069  public void 
testAssignRegionAndUnassignRegion() throws Exception {
-070
createTableWithDefaultConf(tableName);
-071
-072// assign region.
-073HMaster master = 
TEST_UTIL.getHBaseCluster().getMaster();
-074AssignmentManager am = 
master.getAssignmentManager();
-075RegionInfo hri = 
am.getRegionStates().getRegionsOfTable(tableName).get(0);
-076
-077// assert region on server
-078RegionStates regionStates = 
am.getRegionStates();
-079ServerName serverName = 
regionStates.getRegionServerOfRegion(hri);
-080TEST_UTIL.assertRegionOnServer(hri, 
serverName, 200);
-081
assertTrue(regionStates.getRegionState(hri).isOpened());
-082
-083// Region is assigned now. Let's 
assign it again.
-084// Master should not abort, and 
region should stay assigned.
-085
admin.assign(hri.getRegionName()).get();
-086try {
-087  am.waitForAssignment(hri);
-088  fail("Expected 
NoSuchProcedureException");
-089} catch (NoSuchProcedureException e) 
{
-090  // Expected
-091}
-092
assertTrue(regionStates.getRegionState(hri).isOpened());
-093
-094// unassign region
-095admin.unassign(hri.getRegionName(), 
true).get();
-096try {
-097  am.waitForAssignment(hri);
-098  fail("Expected 
NoSuchProcedureException");
-099} catch (NoSuchProcedureException e) 
{
-100  // Expected
-101}
-102
assertTrue(regionStates.getRegionState(hri).isClosed());
-103  }
-104
-105  RegionInfo 
createTableAndGetOneRegion(final TableName tableName)
-106  throws IOException, 
InterruptedException, ExecutionException {
-107TableDescriptor desc =
-108
TableDescriptorBuilder.newBuilder(tableName)
-109
.addColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build();
-110admin.createTable(desc, 
Bytes.toBytes("A"), Bytes.toBytes("Z"), 5).get();
-111
-112// wait till the table is assigned
-113HMaster master = 
TEST_UTIL.getHBaseCluster().getMaster();
-114long timeoutTime = 
System.currentTimeMillis() + 3000;
-115while (true) {
-116  List regions =
-117  
master.getAssignmentManager().getRegionStates().getRegionsOfTable(tableName);
-118  if (regions.size() > 3) {
-119return regions.get(2);
-120  }
-121  long now = 
System.currentTimeMillis();
-122  if (now > timeoutTime) {
-123fail("Could not find an online 
region");
-124  }
-125  Thread.sleep(10);
-126}
-127  }
-128
-129  @Test
-130  public void 
testGetRegionByStateOfTable() throws Exception {
-131RegionInfo hri = 
createTableAndGetOneRegion(tableName);
-132
-133RegionStates regionStates =
-134
TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager().getRegionStates();
-135
assertTrue(regionStates.getRegionByStateOfTable(tableName).get(RegionState.State.OPEN)
-136.stream().anyMatch(r -> 
RegionInfo.COMPARATOR.compare(r, hri) == 0));
-137
assertFalse(regionStates.getRegionByStateOfTable(TableName.valueOf("I_am_the_phantom"))
-138
.get(RegionState.State.OPEN).stream().anyMatch(r -> 
RegionInfo.COMPARATOR.compare(r, hri) == 0));
-139  }
-140
-141  @Test
-142  public void testMoveRegion() throws 
Exception {
-143admin.balancerSwitch(false).join();
-144
-145RegionInfo hri = 
createTableAndGetOneRegion(tableName);
-146RawAsyncHBaseAdmin rawAdmin = 
(RawAsyncHBaseAdmin) ASYNC_CONN.getAdmin();
-147ServerName serverName = 
rawAdmin.getRe
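
createTableAndGetOneRegion() in the hunk above drives the asynchronous admin to create a pre-split table. A stand-alone sketch of the same calls against the 2.0 async client, using a hypothetical table name and the default configuration; it assumes a reachable cluster:

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.AsyncAdmin;
    import org.apache.hadoop.hbase.client.AsyncConnection;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
    import org.apache.hadoop.hbase.util.Bytes;

    public class AsyncCreatePresplitTableExample {
      public static void main(String[] args) throws Exception {
        // Connect asynchronously and create a table pre-split into five regions,
        // mirroring the test above but with the renamed setColumnFamily call.
        try (AsyncConnection conn =
            ConnectionFactory.createAsyncConnection(HBaseConfiguration.create()).get()) {
          AsyncAdmin admin = conn.getAdmin();
          TableDescriptor desc = TableDescriptorBuilder
              .newBuilder(TableName.valueOf("presplit_example"))  // hypothetical name
              .setColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("f")))
              .build();
          admin.createTable(desc, Bytes.toBytes("A"), Bytes.toBytes("Z"), 5).get();
        }
      }
    }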

[26/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
index bf375f2..ad31b71 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
@@ -115,7 +115,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class RegionCoprocessorHost
+public class RegionCoprocessorHost
 extends CoprocessorHost
 Implements the coprocessor environment and runtime support 
for coprocessors
  loaded within a Region.
@@ -796,7 +796,7 @@ extends 
 
 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
 
 
 
@@ -805,7 +805,7 @@ extends 
 
 SHARED_DATA_MAP
-private static 
final org.apache.commons.collections4.map.ReferenceMapString,https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or interface in java.util.concurrent">ConcurrentMapString,https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object>> SHARED_DATA_MAP
+private static 
final org.apache.commons.collections4.map.ReferenceMapString,https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or interface in java.util.concurrent">ConcurrentMapString,https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object>> SHARED_DATA_MAP
 
 
 
@@ -814,7 +814,7 @@ extends 
 
 hasCustomPostScannerFilterRow
-private final boolean hasCustomPostScannerFilterRow
+private final boolean hasCustomPostScannerFilterRow
 
 
 
@@ -823,7 +823,7 @@ extends 
 
 rsServices
-RegionServerServices rsServices
+RegionServerServices rsServices
 The region server services
 
 
@@ -833,7 +833,7 @@ extends 
 
 region
-HRegion region
+HRegion region
 The region
 
 
@@ -843,7 +843,7 @@ extends 
 
 regionObserverGetter
-private CoprocessorHost.ObserverGetter 
regionObserverGetter
+private CoprocessorHost.ObserverGetter 
regionObserverGetter
 
 
 
@@ -852,7 +852,7 @@ extends 
 
 endpointObserverGetter
-private CoprocessorHost.ObserverGetter endpointObserverGetter
+private CoprocessorHost.ObserverGetter endpointObserverGetter
 
 
 
@@ -869,7 +869,7 @@ extends 
 
 RegionCoprocessorHost
-public RegionCoprocessorHost(HRegion region,
+public RegionCoprocessorHost(HRegion region,
  RegionServerServices rsServices,
  
org.apache.hadoop.conf.Configuration conf)
 Constructor
@@ -895,7 +895,7 @@ extends 
 
 getTableCoprocessorAttrsFromSchema
-static https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List getTableCoprocessorAttrsFromSchema(org.apache.hadoop.conf.Configuration conf,
+static https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List getTableCoprocessorAttrsFromSchema(org.apache.hadoop.conf.Configuration conf,

 TableDescriptor htd)
 
 
@@ -905,7 +905,7 @@ extends 
 
 testTableCoprocessorAttrs
-public static void testTableCoprocessorAttrs(org.apache.hadoop.conf.Configuration conf,
+public static void testTableCoprocessorAttrs(org.apache.hadoop.conf.Configuration conf,
  TableDescriptor htd)
   throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Sanity check the table coprocessor attributes of the 
supplied schema. Will
@@ -925,7 +925,7 @@ extends 
 
 loadTableCoprocessors
-void loadTableCoprocessors(org.apache.hadoop.conf.Configuration conf

[31/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
 
b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
index 6c32c5c..3c17832 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":9,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":42,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":9,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":42,"i52":42,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":9,"i64":10,"i65":10,"i66":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":42,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":9,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":42,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":42,"i51":42,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":9,"i63":10,"i64":10,"i65":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public static class TableDescriptorBuilder.ModifyableTableDescriptor
+public static class TableDescriptorBuilder.ModifyableTableDescriptor
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements TableDescriptor, https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable
 TODO: make this private after removing the 
HTableDescriptor
@@ -217,395 +217,391 @@ implements Method and Description
 
 
-TableDescriptorBuilder.ModifyableTableDescriptor
-addColumnFamily(ColumnFamilyDescriptor family)
-Adds a column family.
-
-
-
-TableDescriptorBuilder.ModifyableTableDescriptor
-addCoprocessor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String className)
-Add a table coprocessor to this table.
-
-
-
-TableDescriptorBuilder.ModifyableTableDescriptor
-addCoprocessor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String className,
-  org.apache.hadoop.fs.Path jarFilePath,
-  int priority,
-  https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String> kvs)
-Add a table coprocessor to this table.
-
-
-
-private TableDescriptorBuilder.ModifyableTableDescriptor
-addCoprocessorToMap(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String specStr)
-Add coprocessor to values Map
-
-
-
-TableDescriptorBuilder.ModifyableTableDescriptor
-addCoprocessorWithSpec(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String specStr)
-Add a table coprocessor to this table.
-
-
-
 private void
 checkHasCoprocessor(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in 
java.lang">String className) 
 
-
+
 int
 compareTo(TableDescriptorBuilder.ModifyableTableDescriptor other)
 Compares the descriptor with another descriptor which is 
passed as a
  parameter.
 
 
-
+
 boolean
 equals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Obje

[25/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index dcd9fce..a05534b 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -704,20 +704,20 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.regionserver.BloomType
 org.apache.hadoop.hbase.regionserver.HRegion.FlushResult.Result
-org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
-org.apache.hadoop.hbase.regionserver.ChunkCreator.ChunkType
+org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
+org.apache.hadoop.hbase.regionserver.BloomType
+org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
 org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
+org.apache.hadoop.hbase.regionserver.ChunkCreator.ChunkType
+org.apache.hadoop.hbase.regionserver.Region.Operation
+org.apache.hadoop.hbase.regionserver.TimeRangeTracker.Type
 org.apache.hadoop.hbase.regionserver.CompactingMemStore.IndexType
-org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
+org.apache.hadoop.hbase.regionserver.MemStoreCompactionStrategy.Action
+org.apache.hadoop.hbase.regionserver.FlushType
 org.apache.hadoop.hbase.regionserver.ScanType
 org.apache.hadoop.hbase.regionserver.ScannerContext.NextState
-org.apache.hadoop.hbase.regionserver.TimeRangeTracker.Type
-org.apache.hadoop.hbase.regionserver.FlushType
-org.apache.hadoop.hbase.regionserver.MemStoreCompactionStrategy.Action
-org.apache.hadoop.hbase.regionserver.Region.Operation
-org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
+org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index 2731576..b377318 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -130,9 +130,9 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
+org.apache.hadoop.hbase.regionserver.querymatcher.StripeCompactionScanQueryMatcher.DropDeletesInOutput
 org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode
 org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult
-org.apache.hadoop.hbase.regionserver.querymatcher.StripeCompactionScanQueryMatcher.DropDeletesInOutput
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index f6fc79b..a4ab1b7 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -238,8 +238,8 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader.WALHdrR

[17/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/client/example/ExportEndpointExample.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/example/ExportEndpointExample.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/example/ExportEndpointExample.html
index 6553e14..f2c7811 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/example/ExportEndpointExample.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/example/ExportEndpointExample.html
@@ -64,8 +64,8 @@
 056 Admin admin = con.getAdmin()) 
{
 057  TableDescriptor desc = 
TableDescriptorBuilder.newBuilder(tableName)
 058  // MUST mount the export 
endpoint
-059  
.addCoprocessor(Export.class.getName())
-060  
.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family))
+059  
.setCoprocessor(Export.class.getName())
+060  
.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family))
 061  .build();
 062  admin.createTable(desc);
 063

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
index c0d1f4b..6e98f08 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
@@ -2165,7 +2165,7 @@
 2157}
 2158
 2159TableDescriptor newDesc = 
TableDescriptorBuilder
-2160
.newBuilder(old).addColumnFamily(column).build();
+2160
.newBuilder(old).setColumnFamily(column).build();
 2161return modifyTable(tableName, 
newDesc, nonceGroup, nonce);
 2162  }
 2163

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
index c0d1f4b..6e98f08 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.MasterStoppedException.html
@@ -2165,7 +2165,7 @@
 2157}
 2158
 2159TableDescriptor newDesc = 
TableDescriptorBuilder
-2160
.newBuilder(old).addColumnFamily(column).build();
+2160
.newBuilder(old).setColumnFamily(column).build();
 2161return modifyTable(tableName, 
newDesc, nonceGroup, nonce);
 2162  }
 2163

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
index c0d1f4b..6e98f08 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.RedirectServlet.html
@@ -2165,7 +2165,7 @@
 2157}
 2158
 2159TableDescriptor newDesc = 
TableDescriptorBuilder
-2160
.newBuilder(old).addColumnFamily(column).build();
+2160
.newBuilder(old).setColumnFamily(column).build();
 2161return modifyTable(tableName, 
newDesc, nonceGroup, nonce);
 2162  }
 2163

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
index c0d1f4b..6e98f08 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.html
@@ -2165,7 +2165,7 @@
 2157}
 2158
 2159TableDescriptor newDesc = 
TableDescriptorBuilder
-2160
.newBuilder(old).addColumnFamily(column).build();
+2160
.newBuilder(old).setColumnFamily(column).build();
 2161return modifyTable(tableName, 
newDesc, nonceGroup, nonce);
 2162  }
 2163
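
The repeated hunk above is HMaster.addColumn() rebuilding an existing descriptor with setColumnFamily before calling modifyTable. The client-side counterpart, sketched with the synchronous Admin API and a hypothetical table and family name; it assumes the table already exists:

    import java.io.IOException;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class AddColumnFamilyExample {
      public static void main(String[] args) throws IOException {
        TableName name = TableName.valueOf("existing_table");  // hypothetical, must already exist
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          // The master-side handler shown in the hunk above rebuilds the old descriptor
          // with setColumnFamily and passes the result to modifyTable.
          admin.addColumnFamily(name, ColumnFamilyDescriptorBuilder.of("extra_cf"));
        }
      }
    }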

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/master/TableStateManager.TableStateNotFoundException.html
--

[14/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironmentForCoreCoprocessors.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironmentForCoreCoprocessors.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironmentForCoreCoprocessors.html
index b99f924..2bb6cea 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironmentForCoreCoprocessors.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironmentForCoreCoprocessors.html
@@ -37,1779 +37,1734 @@
 029import java.util.UUID;
 030import 
java.util.concurrent.ConcurrentHashMap;
 031import 
java.util.concurrent.ConcurrentMap;
-032import java.util.regex.Matcher;
-033
-034import 
org.apache.commons.collections4.map.AbstractReferenceMap;
-035import 
org.apache.commons.collections4.map.ReferenceMap;
-036import 
org.apache.hadoop.conf.Configuration;
-037import org.apache.hadoop.fs.FileSystem;
-038import org.apache.hadoop.fs.Path;
-039import org.apache.hadoop.hbase.Cell;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.Coprocessor;
-042import 
org.apache.hadoop.hbase.HBaseConfiguration;
-043import 
org.apache.hadoop.hbase.HConstants;
-044import 
org.apache.hadoop.hbase.RawCellBuilder;
-045import 
org.apache.hadoop.hbase.RawCellBuilderFactory;
-046import 
org.apache.hadoop.hbase.ServerName;
-047import 
org.apache.hadoop.hbase.SharedConnection;
-048import 
org.apache.hadoop.hbase.client.Append;
-049import 
org.apache.hadoop.hbase.client.Connection;
-050import 
org.apache.hadoop.hbase.client.Delete;
-051import 
org.apache.hadoop.hbase.client.Durability;
-052import 
org.apache.hadoop.hbase.client.Get;
-053import 
org.apache.hadoop.hbase.client.Increment;
-054import 
org.apache.hadoop.hbase.client.Mutation;
-055import 
org.apache.hadoop.hbase.client.Put;
-056import 
org.apache.hadoop.hbase.client.RegionInfo;
-057import 
org.apache.hadoop.hbase.client.Result;
-058import 
org.apache.hadoop.hbase.client.Scan;
-059import 
org.apache.hadoop.hbase.client.TableDescriptor;
-060import 
org.apache.hadoop.hbase.coprocessor.BaseEnvironment;
-061import 
org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
-062import 
org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-063import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-064import 
org.apache.hadoop.hbase.coprocessor.CoprocessorService;
-065import 
org.apache.hadoop.hbase.coprocessor.CoprocessorServiceBackwardCompatiblity;
-066import 
org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
-067import 
org.apache.hadoop.hbase.coprocessor.EndpointObserver;
-068import 
org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
-069import 
org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
-070import 
org.apache.hadoop.hbase.coprocessor.ObserverContext;
-071import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-072import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-073import 
org.apache.hadoop.hbase.coprocessor.RegionObserver;
-074import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-075import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-076import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-077import 
org.apache.hadoop.hbase.io.Reference;
-078import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-079import 
org.apache.hadoop.hbase.metrics.MetricRegistry;
-080import 
org.apache.hadoop.hbase.regionserver.Region.Operation;
-081import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-082import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-083import 
org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
-084import 
org.apache.hadoop.hbase.security.User;
-085import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.CoprocessorClassLoader;
-088import 
org.apache.hadoop.hbase.util.Pair;
-089import 
org.apache.hadoop.hbase.wal.WALEdit;
-090import 
org.apache.hadoop.hbase.wal.WALKey;
-091import 
org.apache.yetus.audience.InterfaceAudience;
-092import org.slf4j.Logger;
-093import org.slf4j.LoggerFactory;
-094
-095/**
-096 * Implements the coprocessor environment 
and runtime support for coprocessors
-097 * loaded within a {@link Region}.
-098 */
-099@InterfaceAudience.Private
-100public class RegionCoprocessorHost
-101extends 
CoprocessorHost {
-102
-103  private static final Logger LOG = 
LoggerFactory.getLogger(RegionCoprocessorHost.class);
-104  // The shared data map
-105  private static final 
ReferenceMap

[15/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
index b99f924..2bb6cea 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
@@ -37,1779 +37,1734 @@
 029import java.util.UUID;
 030import 
java.util.concurrent.ConcurrentHashMap;
 031import 
java.util.concurrent.ConcurrentMap;
-032import java.util.regex.Matcher;
-033
-034import 
org.apache.commons.collections4.map.AbstractReferenceMap;
-035import 
org.apache.commons.collections4.map.ReferenceMap;
-036import 
org.apache.hadoop.conf.Configuration;
-037import org.apache.hadoop.fs.FileSystem;
-038import org.apache.hadoop.fs.Path;
-039import org.apache.hadoop.hbase.Cell;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.Coprocessor;
-042import 
org.apache.hadoop.hbase.HBaseConfiguration;
-043import 
org.apache.hadoop.hbase.HConstants;
-044import 
org.apache.hadoop.hbase.RawCellBuilder;
-045import 
org.apache.hadoop.hbase.RawCellBuilderFactory;
-046import 
org.apache.hadoop.hbase.ServerName;
-047import 
org.apache.hadoop.hbase.SharedConnection;
-048import 
org.apache.hadoop.hbase.client.Append;
-049import 
org.apache.hadoop.hbase.client.Connection;
-050import 
org.apache.hadoop.hbase.client.Delete;
-051import 
org.apache.hadoop.hbase.client.Durability;
-052import 
org.apache.hadoop.hbase.client.Get;
-053import 
org.apache.hadoop.hbase.client.Increment;
-054import 
org.apache.hadoop.hbase.client.Mutation;
-055import 
org.apache.hadoop.hbase.client.Put;
-056import 
org.apache.hadoop.hbase.client.RegionInfo;
-057import 
org.apache.hadoop.hbase.client.Result;
-058import 
org.apache.hadoop.hbase.client.Scan;
-059import 
org.apache.hadoop.hbase.client.TableDescriptor;
-060import 
org.apache.hadoop.hbase.coprocessor.BaseEnvironment;
-061import 
org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
-062import 
org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-063import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-064import 
org.apache.hadoop.hbase.coprocessor.CoprocessorService;
-065import 
org.apache.hadoop.hbase.coprocessor.CoprocessorServiceBackwardCompatiblity;
-066import 
org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
-067import 
org.apache.hadoop.hbase.coprocessor.EndpointObserver;
-068import 
org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
-069import 
org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
-070import 
org.apache.hadoop.hbase.coprocessor.ObserverContext;
-071import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-072import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-073import 
org.apache.hadoop.hbase.coprocessor.RegionObserver;
-074import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-075import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-076import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-077import 
org.apache.hadoop.hbase.io.Reference;
-078import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-079import 
org.apache.hadoop.hbase.metrics.MetricRegistry;
-080import 
org.apache.hadoop.hbase.regionserver.Region.Operation;
-081import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-082import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-083import 
org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
-084import 
org.apache.hadoop.hbase.security.User;
-085import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.CoprocessorClassLoader;
-088import 
org.apache.hadoop.hbase.util.Pair;
-089import 
org.apache.hadoop.hbase.wal.WALEdit;
-090import 
org.apache.hadoop.hbase.wal.WALKey;
-091import 
org.apache.yetus.audience.InterfaceAudience;
-092import org.slf4j.Logger;
-093import org.slf4j.LoggerFactory;
-094
-095/**
-096 * Implements the coprocessor environment 
and runtime support for coprocessors
-097 * loaded within a {@link Region}.
-098 */
-099@InterfaceAudience.Private
-100public class RegionCoprocessorHost
-101extends 
CoprocessorHost {
-102
-103  private static final Logger LOG = 
LoggerFactory.getLogger(RegionCoprocessorHost.class);
-104  // The shared data map
-105  private static final 
ReferenceMap> SHARED_DATA_MAP 
=
-106  new 
ReferenceMap<>(AbstractReferenceMap.ReferenceSt
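
The truncated constructor call above is the shared data map of RegionCoprocessorHost, a commons-collections4 ReferenceMap keyed by coprocessor class name. The reference strengths are cut off in the hunk; the sketch below assumes hard keys and weak values, which would let an entry be collected once no live coprocessor still holds its map:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;
    import org.apache.commons.collections4.map.AbstractReferenceMap;
    import org.apache.commons.collections4.map.ReferenceMap;

    public class SharedDataMapSketch {
      // Keys held strongly, value maps held weakly (assumed strengths; the hunk is truncated).
      private static final ReferenceMap<String, ConcurrentMap<String, Object>> SHARED_DATA_MAP =
          new ReferenceMap<>(AbstractReferenceMap.ReferenceStrength.HARD,
              AbstractReferenceMap.ReferenceStrength.WEAK);

      public static void main(String[] args) {
        // Each coprocessor class name maps to one ConcurrentMap shared by all its instances.
        ConcurrentMap<String, Object> shared = SHARED_DATA_MAP
            .computeIfAbsent("com.example.MyRegionObserver", k -> new ConcurrentHashMap<>());
        shared.put("requests", 1L);
        System.out.println(SHARED_DATA_MAP.get("com.example.MyRegionObserver"));
      }
    }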

[06/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.html 
b/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.html
index 34c552c..cd795e1 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestAsyncRegionAdminApi
+public class TestAsyncRegionAdminApi
 extends TestAsyncAdminBase
 Class to test asynchronous region admin operations.
 
@@ -288,7 +288,7 @@ extends 
 
 CLASS_RULE
-public static final HBaseClassTestRule CLASS_RULE
+public static final HBaseClassTestRule CLASS_RULE
 
 
 
@@ -305,7 +305,7 @@ extends 
 
 TestAsyncRegionAdminApi
-public TestAsyncRegionAdminApi()
+public TestAsyncRegionAdminApi()
 
 
 
@@ -322,7 +322,7 @@ extends 
 
 testAssignRegionAndUnassignRegion
-public void testAssignRegionAndUnassignRegion()
+public void testAssignRegionAndUnassignRegion()
throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true";
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -336,7 +336,7 @@ extends 
 
 createTableAndGetOneRegion
-org.apache.hadoop.hbase.client.RegionInfo createTableAndGetOneRegion(org.apache.hadoop.hbase.TableName tableName)
+org.apache.hadoop.hbase.client.RegionInfo createTableAndGetOneRegion(org.apache.hadoop.hbase.TableName tableName)
   throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException,
  https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true";
 title="class or interface in java.lang">InterruptedException,
  https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutionException.html?is-external=true";
 title="class or interface in java.util.concurrent">ExecutionException
@@ -354,7 +354,7 @@ extends 
 
 testGetRegionByStateOfTable
-public void testGetRegionByStateOfTable()
+public void testGetRegionByStateOfTable()
  throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true";
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -368,7 +368,7 @@ extends 
 
 testMoveRegion
-public void testMoveRegion()
+public void testMoveRegion()
 throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true";
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -382,7 +382,7 @@ extends 
 
 testGetOnlineRegions
-public void testGetOnlineRegions()
+public void testGetOnlineRegions()
   throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true";
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -396,7 +396,7 @@ extends 
 
 testFlushTableAndRegion
-public void testFlushTableAndRegion()
+public void testFlushTableAndRegion()
  throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true";
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -410,7 +410,7 @@ extends 
 
 waitUntilMobCompactionFinished
-private void waitUntilMobCompactionFinished(org.apache.hadoop.hbase.TableName tableName)
+private void waitUntilMobCompactionFinished(org.apache.hadoop.hbase.TableName tableName)
  throws https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutionException.html?is-external=true";
 title="class or interface in java.util.concurrent">ExecutionException,
 https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true";
 title="class or interface in java.lang">InterruptedException
 
@@ -426,7 +426,7 @@ extends 
 
 testCompactMob
-public void testCompactMob()
+public void testCompactMob()
 throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true";
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -440,7 +440,7 @@ extends 
 
 testCompactRegionServer
-public void testCompactRegionServer()
+public void testCompactRegionServer()
  throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true";
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -454,7 +454,7 @@ extends 
 
 testCompact
-public void testCompact()
+pu

[18/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
index 05c0542..2d09bf8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
@@ -35,1393 +35,1419 @@
 027import java.util.HashSet;
 028import java.util.List;
 029import java.util.Map;
-030import java.util.Set;
-031import java.util.TreeMap;
-032import java.util.TreeSet;
-033import java.util.function.Function;
-034import java.util.regex.Matcher;
-035import org.apache.hadoop.fs.Path;
-036import 
org.apache.hadoop.hbase.Coprocessor;
-037import 
org.apache.hadoop.hbase.HConstants;
-038import 
org.apache.hadoop.hbase.TableName;
-039import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-040import 
org.apache.hadoop.hbase.security.User;
-041import 
org.apache.hadoop.hbase.util.Bytes;
-042import 
org.apache.yetus.audience.InterfaceAudience;
-043import org.slf4j.Logger;
-044import org.slf4j.LoggerFactory;
-045
-046import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-047import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-048
-049/**
-050 * @since 2.0.0
-051 */
-052@InterfaceAudience.Public
-053public class TableDescriptorBuilder {
-054  public static final Logger LOG = 
LoggerFactory.getLogger(TableDescriptorBuilder.class);
-055  @InterfaceAudience.Private
-056  public static final String SPLIT_POLICY 
= "SPLIT_POLICY";
-057  private static final Bytes 
SPLIT_POLICY_KEY = new Bytes(Bytes.toBytes(SPLIT_POLICY));
-058  /**
-059   * Used by HBase Shell interface to 
access this metadata
-060   * attribute which denotes the maximum 
size of the store file after which a
-061   * region split occurs.
-062   */
-063  @InterfaceAudience.Private
-064  public static final String MAX_FILESIZE 
= "MAX_FILESIZE";
-065  private static final Bytes 
MAX_FILESIZE_KEY
-066  = new 
Bytes(Bytes.toBytes(MAX_FILESIZE));
-067
-068  @InterfaceAudience.Private
-069  public static final String OWNER = 
"OWNER";
+030import java.util.Objects;
+031import java.util.Optional;
+032import java.util.Set;
+033import java.util.TreeMap;
+034import java.util.TreeSet;
+035import java.util.function.Function;
+036import java.util.regex.Matcher;
+037import java.util.regex.Pattern;
+038import 
org.apache.hadoop.hbase.Coprocessor;
+039import 
org.apache.hadoop.hbase.HConstants;
+040import 
org.apache.hadoop.hbase.TableName;
+041import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+042import 
org.apache.hadoop.hbase.security.User;
+043import 
org.apache.hadoop.hbase.util.Bytes;
+044import 
org.apache.yetus.audience.InterfaceAudience;
+045import org.slf4j.Logger;
+046import org.slf4j.LoggerFactory;
+047
+048import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+049import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
+050
+051/**
+052 * @since 2.0.0
+053 */
+054@InterfaceAudience.Public
+055public class TableDescriptorBuilder {
+056  public static final Logger LOG = 
LoggerFactory.getLogger(TableDescriptorBuilder.class);
+057  @InterfaceAudience.Private
+058  public static final String SPLIT_POLICY 
= "SPLIT_POLICY";
+059  private static final Bytes 
SPLIT_POLICY_KEY = new Bytes(Bytes.toBytes(SPLIT_POLICY));
+060  /**
+061   * Used by HBase Shell interface to 
access this metadata
+062   * attribute which denotes the maximum 
size of the store file after which a
+063   * region split occurs.
+064   */
+065  @InterfaceAudience.Private
+066  public static final String MAX_FILESIZE 
= "MAX_FILESIZE";
+067  private static final Bytes 
MAX_FILESIZE_KEY
+068  = new 
Bytes(Bytes.toBytes(MAX_FILESIZE));
+069
 070  @InterfaceAudience.Private
-071  public static final Bytes OWNER_KEY
-072  = new 
Bytes(Bytes.toBytes(OWNER));
-073
-074  /**
-075   * Used by rest interface to access 
this metadata attribute
-076   * which denotes if the table is Read 
Only.
-077   */
-078  @InterfaceAudience.Private
-079  public static final String READONLY = 
"READONLY";
-080  private static final Bytes 
READONLY_KEY
-081  = new 
Bytes(Bytes.toBytes(READONLY));
-082
-083  /**
-084   * Used by HBase Shell interface to 
access this metadata
-085   * attribute which denotes if the table 
is compaction enabled.
-086   */
-087  @InterfaceAudience.Private
-088  public static final String 
COMPACTION_ENABLED = "COMPACTION_ENABLED";
-089  private static final Bytes 
COMPACTION_ENABLED_KEY
-090  = new 
Bytes(Bytes.toBytes(COMPACTION_ENABLED));
-091
-092  /**
-093   * Used by HBase Shell interface to 
access this metadata
-094   * attribute which represents the 
maximum 
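
The constants laid out above (SPLIT_POLICY, MAX_FILESIZE, and the rest) are the attribute names behind the builder's setters. A minimal sketch of the two named so far, assuming the builder's setMaxFileSize and setRegionSplitPolicyClassName setters; the table name and the ten-gigabyte threshold are made up, and ConstantSizeRegionSplitPolicy is one of the stock split policies:

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

    public class SplitTuningExample {
      public static void main(String[] args) {
        // MAX_FILESIZE and SPLIT_POLICY end up as descriptor attributes behind these calls.
        TableDescriptor desc = TableDescriptorBuilder
            .newBuilder(TableName.valueOf("tuned_table"))            // hypothetical name
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f"))
            .setMaxFileSize(10L * 1024 * 1024 * 1024)                // split above ~10 GB
            .setRegionSplitPolicyClassName(
                "org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy")
            .build();
        System.out.println(desc.getMaxFileSize());
      }
    }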

[43/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
index 26b9cc0..6fa64ff 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
@@ -949,394 +949,411 @@
 941*/
 942  public static final float 
HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD = 0.2f;
 943
-944  public static final Pattern 
CP_HTD_ATTR_KEY_PATTERN =
-945  
Pattern.compile("^coprocessor\\$([0-9]+)$", Pattern.CASE_INSENSITIVE);
-946
-947  /**
-948   * 
-949   * Pattern that matches a coprocessor 
specification. Form is:
-950   * {@code  '|'  ['|'  ['|' 
]]}
-951   * where arguments are {@code 
 '='  [,...]}
-952   * For example: {@code 
hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2}
-953   * 
-954   */
-955  public static final Pattern CP_HTD_ATTR_VALUE_PATTERN =
-956      Pattern.compile("(^[^\\|]*)\\|([^\\|]+)\\|[\\s]*([\\d]*)[\\s]*(\\|.*)?$");
-957
-958  public static final String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN = "[^=,]+";
-959  public static final String CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN = "[^,]+";
-960  public static final Pattern CP_HTD_ATTR_VALUE_PARAM_PATTERN = Pattern.compile(
-961      "(" + CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN + ")=(" +
-962      CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN + "),?");
-963  public static final String CP_HTD_ATTR_INCLUSION_KEY =
-964      "hbase.coprocessor.classloader.included.classes";
-965
-966  /** The delay when re-trying a socket operation in a loop (HBASE-4712) */
-967  public static final int SOCKET_RETRY_WAIT_MS = 200;
-968
-969  /** Host name of the local machine */
-970  public static final String LOCALHOST = "localhost";
-971
-972  /**
-973   * If this parameter is set to true, then hbase will read
-974   * data and then verify checksums. Checksum verification
-975   * inside hdfs will be switched off. However, if the hbase-checksum
-976   * verification fails, then it will switch back to using
-977   * hdfs checksums for verifiying data that is being read from storage.
-978   *
-979   * If this parameter is set to false, then hbase will not
-980   * verify any checksums, instead it will depend on checksum verification
-981   * being done in the hdfs client.
-982   */
-983  public static final String HBASE_CHECKSUM_VERIFICATION =
-984      "hbase.regionserver.checksum.verify";
+944  /**
+945   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+946   */
+947  @Deprecated
+948  public static final Pattern CP_HTD_ATTR_KEY_PATTERN =
+949      Pattern.compile("^coprocessor\\$([0-9]+)$", Pattern.CASE_INSENSITIVE);
+950
+951  /**
+952   *
+953   * Pattern that matches a coprocessor 
specification. Form is:
+954   * {@code  '|'  ['|'  ['|' 
]]}
+955   * where arguments are {@code 
 '='  [,...]}
+956   * For example: {@code 
hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2}
+957   * 
+958   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+959   */
+960  @Deprecated
+961  public static final Pattern CP_HTD_ATTR_VALUE_PATTERN =
+962      Pattern.compile("(^[^\\|]*)\\|([^\\|]+)\\|[\\s]*([\\d]*)[\\s]*(\\|.*)?$");
+963  /**
+964   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+965   */
+966  @Deprecated
+967  public static final String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN = "[^=,]+";
+968  /**
+969   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+970   */
+971  @Deprecated
+972  public static final String CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN = "[^,]+";
+973  /**
+974   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+975   */
+976  @Deprecated
+977  public static final Pattern CP_HTD_ATTR_VALUE_PARAM_PATTERN = Pattern.compile(
+978      "(" + CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN + ")=(" +
+979      CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN + "),?");
+980  public static final String CP_HTD_ATTR_INCLUSION_KEY =
+981      "hbase.coprocessor.classloader.included.classes";
+982
+983  /** The delay when re-trying a socket operation in a loop (HBASE-4712) */
+984  public static final int SOCKET_RETRY_WAIT_MS = 200;
 985
-986  public static final String LOCALHOST_IP = "127.0.0.1";
-987
-988  public static final String REGION_SERVER_HANDLER_COUNT = "hbase.regionserver.handler.count";
-989  public static final int DEFAULT_REGION_SERVER_HANDLER_COUNT = 30;
-990
-991  /*
-992   * REGION_SERVER_HAND
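
The patterns deprecated in the hunk above still document the legacy coprocessor table-attribute format. A minimal standalone sketch of how CP_HTD_ATTR_VALUE_PATTERN splits a "path|class|priority|args" spec string; the regex and the sample spec are taken from the diff, while the wrapper class and printed labels are illustrative only:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class CoprocessorSpecPatternSketch {
  // Same regex as the (now deprecated) HConstants.CP_HTD_ATTR_VALUE_PATTERN above.
  private static final Pattern CP_SPEC = Pattern.compile(
      "(^[^\\|]*)\\|([^\\|]+)\\|[\\s]*([\\d]*)[\\s]*(\\|.*)?$");

  public static void main(String[] args) {
    String spec = "hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2";
    Matcher m = CP_SPEC.matcher(spec);
    if (m.matches()) {
      System.out.println("jar path = " + m.group(1));  // hdfs:///foo.jar (may be empty)
      System.out.println("class    = " + m.group(2));  // com.foo.FooRegionObserver
      System.out.println("priority = " + m.group(3));  // 1001 (may be empty)
      System.out.println("args     = " + m.group(4));  // |arg1=1,arg2=2 (keeps the leading '|')
    }
  }
}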

[05/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/testdevapidocs/org/apache/hadoop/hbase/client/package-summary.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/client/package-summary.html 
b/testdevapidocs/org/apache/hadoop/hbase/client/package-summary.html
index d43317c..3385a0a 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/package-summary.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/package-summary.html
@@ -742,497 +742,501 @@
  
 
 
-TestDelayingRunner
+TestCoprocessorDescriptor
  
 
 
-TestDeleteTimeStamp
+TestDelayingRunner
  
 
 
+TestDeleteTimeStamp
+ 
+
+
 TestDropTimeoutRequest
 
 Test a drop timeout request.
 
 
-
+
 TestDropTimeoutRequest.SleepLongerAtFirstCoprocessor
 
 Coprocessor that sleeps a while the first time you do a 
Get
 
 
-
+
 TestEnableTable
  
 
-
+
 TestEnableTable.MasterSyncObserver
  
 
-
+
 TestFastFail
  
 
-
+
 TestFastFail.CallQueueTooBigPffeInterceptor
  
 
-
+
 TestFastFail.MyPreemptiveFastFailInterceptor
  
 
-
+
 TestFlushFromClient
  
 
-
+
 TestFromClientSide
 
 Run tests that use the HBase clients; 
Table.
 
 
-
+
 TestFromClientSide3
  
 
-
+
 TestFromClientSide3.WaitingForMultiMutationsObserver
  
 
-
+
 TestFromClientSide3.WaitingForScanObserver
  
 
-
+
 TestFromClientSideNoCodec
 
 Do some ops and prove that client and server can work w/o 
codecs; that we can pb all the time.
 
 
-
+
 TestFromClientSideScanExcpetion
  
 
-
+
 TestFromClientSideScanExcpetion.MyHRegion
  
 
-
+
 TestFromClientSideScanExcpetion.MyHStore
  
 
-
+
 TestFromClientSideScanExcpetion.MyStoreScanner
  
 
-
+
 TestFromClientSideScanExcpetionWithCoprocessor
 
 Test all client operations with a coprocessor that just 
implements the default flush/compact/scan
  policy.
 
 
-
+
 TestFromClientSideWithCoprocessor
 
 Test all client operations with a coprocessor that just 
implements the default flush/compact/scan
  policy.
 
 
-
+
 TestGet
  
 
-
+
 TestGetProcedureResult
 
 Testcase for HBASE-19608.
 
 
-
+
 TestGetProcedureResult.DummyProcedure
  
 
-
+
 TestHBaseAdminNoCluster
  
 
-
+
 TestHTableMultiplexer
  
 
-
+
 TestHTableMultiplexerFlushCache
  
 
-
+
 TestHTableMultiplexerViaMocks
  
 
-
+
 TestIllegalTableDescriptor
  
 
-
+
 TestImmutableHColumnDescriptor
  
 
-
+
 TestImmutableHRegionInfo
 
 Test ImmutableHRegionInfo
 
 
-
+
 TestImmutableHTableDescriptor
  
 
-
+
 TestIncrement
  
 
-
+
 TestIncrementFromClientSideWithCoprocessor
 
 Test all Increment client operations with a 
coprocessor that
  just implements the default flush/compact/scan policy.
 
 
-
+
 TestIncrementsFromClientSide
 
 Run Increment tests that use the HBase clients; 
HTable.
 
 
-
+
 TestInterfaceAlign
  
 
-
+
 TestIntraRowPagination
 
 Test scan/get offset and limit settings within one row 
through HRegion API.
 
 
-
+
 TestLeaseRenewal
  
 
-
+
 TestLimitedScanWithFilter
 
With a filter we may stop in the middle of a row and think that we still have more cells for the
  current row, but actually all the remaining cells will be filtered out by the filter.
 
 
-
+
 TestMalformedCellFromClient
 
The purpose of this test is to verify that the regionserver deals with malformed cells correctly.
 
 
-
+
 TestMetaCache
  
 
-
+
 TestMetaCache.CallQueueTooBigExceptionInjector
 
 Throws CallQueueTooBigException for all gets.
 
 
-
+
 TestMetaCache.ExceptionInjector
  
 
-
+
 TestMetaCache.FakeRSRpcServices
  
 
-
+
 TestMetaCache.RegionServerWithFakeRpcServices
  
 
-
+
 TestMetaCache.RoundRobinExceptionInjector
 
 Rotates through the possible cache clearing and non-cache 
clearing exceptions
  for requests.
 
 
-
+
 TestMetaWithReplicas
 
 Tests the scenarios where replicas are enabled for the meta 
table
 
 
-
+
 TestMetricsConnection
  
 
-
+
 TestMobCloneSnapshotFromClient
 
 Test clone snapshots from the client
 
 
-
+
 TestMobCloneSnapshotFromClient.DelayFlushCoprocessor
 
 This coprocessor is used to delay the flush.
 
 
-
+
 TestMobRestoreSnapshotFromClient
 
 Test restore snapshots from the client
 
 
-
+
 TestMobSnapshotCloneIndependence
 
 Test to verify that the cloned table is independent of the 
table from which it was cloned
 
 
-
+
 TestMobSnapshotFromClient
 
 Test create/using/deleting snapshots from the client
 
 
-
+
 TestMultiParallel
  
 
-
+
 TestMultiParallel.MyMasterObserver
  
 
-
+
 TestMultipleTimestamps
 
 Run tests related to TimestampsFilter using 
HBase client APIs.
 
 
-
+
 TestMultiRespectsLimits
 
 This test sets the multi size WAY low and then checks 
to make sure that gets will still make
  progress.
 
 
-
+
 TestMutation
  
 
-
+
 TestMvccConsistentScanner
  
 
-
+
 TestOperation
 
 Run tests that use the functionality of the Operation 
superclass for
  Puts, Gets, Deletes, Scans, and MultiPuts.
 
 
-
+
 TestProcedureFuture
  
 
-
+
 TestProcedureFuture.TestFuture
  
 
-
+
 TestPutDeleteEtcCellIteration
 
 Test that I can Iterate Client Actions that hold Cells (Get 

[29/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
index 18597dd..d5a3666 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
@@ -319,16 +319,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-TableDescriptorBuilder
-TableDescriptorBuilder.addColumnFamily(ColumnFamilyDescriptor family) 
-
-
-TableDescriptorBuilder.ModifyableTableDescriptor
-TableDescriptorBuilder.ModifyableTableDescriptor.addColumnFamily(ColumnFamilyDescriptor family)
-Adds a column family.
-
-
-
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 AsyncHBaseAdmin.addColumnFamily(TableName tableName,
ColumnFamilyDescriptor columnFamily) 
@@ -444,6 +434,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 TableDescriptorBuilder.ModifyableTableDescriptor.putColumnFamily(ColumnFamilyDescriptor family) 
 
 
+TableDescriptorBuilder
+TableDescriptorBuilder.setColumnFamily(ColumnFamilyDescriptor family) 
+
+
+TableDescriptorBuilder.ModifyableTableDescriptor
+TableDescriptorBuilder.ModifyableTableDescriptor.setColumnFamily(ColumnFamilyDescriptor family)
+Adds a column family.
+
+
+
 static byte[]
 ColumnFamilyDescriptorBuilder.toByteArray(ColumnFamilyDescriptor desc) 
 
@@ -466,6 +466,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 static https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true";
 title="class or interface in java.util">Comparator
 TableDescriptor.getComparator(https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true";
 title="class or interface in java.util">Comparator cfComparator) 
 
+
+TableDescriptorBuilder
+TableDescriptorBuilder.setColumnFamilies(https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection families) 
+
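
This class-use page reflects the rename of addColumnFamily to setColumnFamily and the new setColumnFamilies(Collection) overload on TableDescriptorBuilder. A short sketch of the renamed builder calls; the table and family names are made up, and the ColumnFamilyDescriptorBuilder calls are assumed from the 2.0 client API:

import java.util.Arrays;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class SetColumnFamiliesSketch {
  public static TableDescriptor twoFamilies() {
    ColumnFamilyDescriptor cf1 = ColumnFamilyDescriptorBuilder
        .newBuilder(Bytes.toBytes("cf1"))
        .setMaxVersions(3)
        .build();
    ColumnFamilyDescriptor cf2 = ColumnFamilyDescriptorBuilder.of("cf2");

    // setColumnFamilies replaces repeated addColumnFamily calls on the builder.
    return TableDescriptorBuilder.newBuilder(TableName.valueOf("example_table"))
        .setColumnFamilies(Arrays.asList(cf1, cf2))
        .build();
  }
}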
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/client/class-use/CoprocessorDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/CoprocessorDescriptor.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/CoprocessorDescriptor.html
new file mode 100644
index 000..e46c0f2
--- /dev/null
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/CoprocessorDescriptor.html
@@ -0,0 +1,266 @@
+
+
+
+
+
+Uses of Interface org.apache.hadoop.hbase.client.CoprocessorDescriptor 
(Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+
+
+
+
+
+
+
+Uses of 
Interfaceorg.apache.hadoop.hbase.client.CoprocessorDescriptor
+
+
+
+
+
+Packages that use CoprocessorDescriptor 
+
+Package
+Description
+
+
+
+org.apache.hadoop.hbase
+ 
+
+
+org.apache.hadoop.hbase.client
+
+Provides HBase Client
+
+
+
+
+
+
+
+
+
+
+Uses of CoprocessorDescriptor 
in org.apache.hadoop.hbase
+
+Methods in org.apache.hadoop.hbase
 that return types with arguments of type CoprocessorDescriptor 
+
+Modifier and Type
+Method and Description
+
+
+
+https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
+HTableDescriptor.getCoprocessorDescriptors()
+Deprecated. 
+ 
+
+
+
+
+
+
+
+Uses of CoprocessorDescriptor 
in org.apache.hadoop.hbase.client
+
+Classes in org.apache.hadoop.hbase.client
 that implement CoprocessorDescriptor 
+
+Modifier and Type
+Cla

[51/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/8b1eaec1
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/8b1eaec1
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/8b1eaec1

Branch: refs/heads/asf-site
Commit: 8b1eaec147abafc2b4eeef6c594bec615c579e58
Parents: 291f0a4
Author: jenkins 
Authored: Fri Mar 16 14:53:20 2018 +
Committer: jenkins 
Committed: Fri Mar 16 14:53:20 2018 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf|  3354 +--
 apidocs/allclasses-frame.html   | 2 +
 apidocs/allclasses-noframe.html | 2 +
 apidocs/deprecated-list.html|39 +-
 apidocs/index-all.html  |92 +-
 apidocs/org/apache/hadoop/hbase/HConstants.html |   271 +-
 .../apache/hadoop/hbase/HTableDescriptor.html   |   365 +-
 .../apache/hadoop/hbase/client/Consistency.html | 4 +-
 .../hbase/client/CoprocessorDescriptor.html |   281 +
 .../client/CoprocessorDescriptorBuilder.html|   320 +
 .../org/apache/hadoop/hbase/client/Cursor.html  | 4 +-
 .../hadoop/hbase/client/TableDescriptor.html|70 +-
 .../hbase/client/TableDescriptorBuilder.html|   236 +-
 .../class-use/ColumnFamilyDescriptor.html   |30 +-
 .../client/class-use/CoprocessorDescriptor.html |   236 +
 .../class-use/CoprocessorDescriptorBuilder.html |   184 +
 .../class-use/TableDescriptorBuilder.html   |69 +-
 .../hadoop/hbase/client/package-frame.html  | 2 +
 .../hadoop/hbase/client/package-summary.html|70 +-
 .../hadoop/hbase/client/package-tree.html   | 2 +
 .../apache/hadoop/hbase/client/package-use.html |23 +-
 .../hbase/coprocessor/package-summary.html  | 2 +-
 apidocs/overview-tree.html  | 2 +
 .../org/apache/hadoop/hbase/HConstants.html |   755 +-
 .../apache/hadoop/hbase/HTableDescriptor.html   |  1735 +-
 .../hbase/client/CoprocessorDescriptor.html |   123 +
 .../client/CoprocessorDescriptorBuilder.html|   190 +
 .../hadoop/hbase/client/TableDescriptor.html|   449 +-
 .../hbase/client/TableDescriptorBuilder.html|  2744 +-
 book.html   | 8 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 25738 -
 checkstyle.rss  |54 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/allclasses-frame.html| 3 +
 devapidocs/allclasses-noframe.html  | 3 +
 devapidocs/constant-values.html |20 +-
 devapidocs/deprecated-list.html |   201 +-
 devapidocs/index-all.html   |   188 +-
 .../org/apache/hadoop/hbase/HConstants.html |   273 +-
 .../apache/hadoop/hbase/HTableDescriptor.html   |   365 +-
 .../hadoop/hbase/backup/package-tree.html   | 2 +-
 .../apache/hadoop/hbase/client/Consistency.html | 4 +-
 .../hbase/client/CoprocessorDescriptor.html |   285 +
 ...riptorBuilder.CoprocessorDescriptorImpl.html |   444 +
 .../client/CoprocessorDescriptorBuilder.html|   450 +
 .../org/apache/hadoop/hbase/client/Cursor.html  | 4 +-
 .../hbase/client/ImmutableHTableDescriptor.html | 2 +-
 .../hadoop/hbase/client/TableDescriptor.html|70 +-
 ...riptorBuilder.ModifyableTableDescriptor.html |   410 +-
 .../hbase/client/TableDescriptorBuilder.html|   460 +-
 .../class-use/ColumnFamilyDescriptor.html   |24 +-
 .../client/class-use/CoprocessorDescriptor.html |   266 +
 ...riptorBuilder.CoprocessorDescriptorImpl.html |   125 +
 .../class-use/CoprocessorDescriptorBuilder.html |   184 +
 .../hbase/client/class-use/TableDescriptor.html | 2 +-
 ...riptorBuilder.ModifyableTableDescriptor.html |66 +-
 .../class-use/TableDescriptorBuilder.html   |69 +-
 .../hadoop/hbase/client/package-frame.html  | 3 +
 .../hadoop/hbase/client/package-summary.html|76 +-
 .../hadoop/hbase/client/package-tree.html   |25 +-
 .../apache/hadoop/hbase/client/package-use.html |49 +-
 .../class-use/MasterCoprocessorEnvironment.html | 2 +-
 .../coprocessor/class-use/ObserverContext.html  | 2 +-
 .../hbase/coprocessor/package-summary.html  | 2 +-
 .../hadoop/hbase/executor/package-tree.html | 2 +-
 .../hadoop/hbase/filter/package-tree.html   | 8

[23/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
index 26b9cc0..6fa64ff 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
@@ -949,394 +949,411 @@
 941*/
 942  public static final float 
HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD = 0.2f;
 943
-944  public static final Pattern 
CP_HTD_ATTR_KEY_PATTERN =
-945  
Pattern.compile("^coprocessor\\$([0-9]+)$", Pattern.CASE_INSENSITIVE);
-946
-947  /**
-948   * 
-949   * Pattern that matches a coprocessor 
specification. Form is:
-950   * {@code  '|'  ['|'  ['|' 
]]}
-951   * where arguments are {@code 
 '='  [,...]}
-952   * For example: {@code 
hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2}
-953   * 
-954   */
-955  public static final Pattern CP_HTD_ATTR_VALUE_PATTERN =
-956      Pattern.compile("(^[^\\|]*)\\|([^\\|]+)\\|[\\s]*([\\d]*)[\\s]*(\\|.*)?$");
-957
-958  public static final String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN = "[^=,]+";
-959  public static final String CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN = "[^,]+";
-960  public static final Pattern CP_HTD_ATTR_VALUE_PARAM_PATTERN = Pattern.compile(
-961      "(" + CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN + ")=(" +
-962      CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN + "),?");
-963  public static final String CP_HTD_ATTR_INCLUSION_KEY =
-964      "hbase.coprocessor.classloader.included.classes";
-965
-966  /** The delay when re-trying a socket operation in a loop (HBASE-4712) */
-967  public static final int SOCKET_RETRY_WAIT_MS = 200;
-968
-969  /** Host name of the local machine */
-970  public static final String LOCALHOST = "localhost";
-971
-972  /**
-973   * If this parameter is set to true, then hbase will read
-974   * data and then verify checksums. Checksum verification
-975   * inside hdfs will be switched off. However, if the hbase-checksum
-976   * verification fails, then it will switch back to using
-977   * hdfs checksums for verifiying data that is being read from storage.
-978   *
-979   * If this parameter is set to false, then hbase will not
-980   * verify any checksums, instead it will depend on checksum verification
-981   * being done in the hdfs client.
-982   */
-983  public static final String HBASE_CHECKSUM_VERIFICATION =
-984      "hbase.regionserver.checksum.verify";
+944  /**
+945   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+946   */
+947  @Deprecated
+948  public static final Pattern CP_HTD_ATTR_KEY_PATTERN =
+949      Pattern.compile("^coprocessor\\$([0-9]+)$", Pattern.CASE_INSENSITIVE);
+950
+951  /**
+952   *
+953   * Pattern that matches a coprocessor 
specification. Form is:
+954   * {@code  '|'  ['|'  ['|' 
]]}
+955   * where arguments are {@code 
 '='  [,...]}
+956   * For example: {@code 
hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2}
+957   * 
+958   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+959   */
+960  @Deprecated
+961  public static final Pattern CP_HTD_ATTR_VALUE_PATTERN =
+962      Pattern.compile("(^[^\\|]*)\\|([^\\|]+)\\|[\\s]*([\\d]*)[\\s]*(\\|.*)?$");
+963  /**
+964   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+965   */
+966  @Deprecated
+967  public static final String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN = "[^=,]+";
+968  /**
+969   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+970   */
+971  @Deprecated
+972  public static final String CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN = "[^,]+";
+973  /**
+974   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+975   */
+976  @Deprecated
+977  public static final Pattern CP_HTD_ATTR_VALUE_PARAM_PATTERN = Pattern.compile(
+978      "(" + CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN + ")=(" +
+979      CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN + "),?");
+980  public static final String CP_HTD_ATTR_INCLUSION_KEY =
+981      "hbase.coprocessor.classloader.included.classes";
+982
+983  /** The delay when re-trying a socket operation in a loop (HBASE-4712) */
+984  public static final int SOCKET_RETRY_WAIT_MS = 200;
 985
-986  public static final String LOCALHOST_IP = "127.0.0.1";
-987
-988  public static final String REGION_SERVER_HANDLER_COUNT = "hbase.regionserver.handler.coun

[28/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-summary.html 
b/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
index 3377afb..78a22ea 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-summary.html
@@ -262,163 +262,169 @@
 
 
 
+CoprocessorDescriptor
+
+CoprocessorDescriptor contains the details about how to 
build a coprocessor.
+
+
+
 HBaseAdmin.ProcedureFuture.WaitForStateCallable
  
 
-
+
 MasterKeepAliveConnection
 
 A KeepAlive connection is not physically closed immediately 
after the close,
   but rather kept alive for a few minutes.
 
 
-
+
 MetricsConnection.NewMetric
 
 A lambda for dispatching to the appropriate metric factory 
method
 
 
-
+
 NonceGenerator
 
 NonceGenerator interface.
 
 
-
+
 RawAsyncHBaseAdmin.AdminRpcCall
  
 
-
+
 RawAsyncHBaseAdmin.Converter
  
 
-
+
 RawAsyncHBaseAdmin.MasterRpcCall
  
 
-
+
 RawAsyncHBaseAdmin.TableOperator
  
 
-
+
 RawAsyncTableImpl.Converter
  
 
-
+
 RawAsyncTableImpl.NoncedConverter
  
 
-
+
 RawAsyncTableImpl.RpcCall
  
 
-
+
 RegionInfo
 
 Information about a region.
 
 
-
+
 RegionLocator
 
 Used to view region location information for a single HBase 
table.
 
 
-
+
 RequestController
 
 An interface for client request scheduling algorithm.
 
 
-
+
 RequestController.Checker
 
 Picks up the valid data.
 
 
-
+
 ResultScanner
 
 Interface for client-side scanning.
 
 
-
+
 RetryingCallable
 
 A Callable that will be retried.
 
 
-
+
 Row
 
 Has a row.
 
 
-
+
 RowAccess
 
 Provide a way to access the inner buffer.
 
 
-
+
 RpcRetryingCaller
  
 
-
+
 ScanResultCache
 
 Used to separate the row constructing logic.
 
 
-
+
 ScanResultConsumer
 
 Receives Result for an asynchronous 
scan.
 
 
-
+
 ScanResultConsumerBase
 
 The base interface for scan result consumer.
 
 
-
+
 ServiceCaller
 
 Delegate to a protobuf rpc call.
 
 
-
+
 SimpleRequestController.RowChecker
 
 Provide a way to control the flow of rows iteration.
 
 
-
+
 StatisticTrackable
 
 Parent interface for an object to get updates about 
per-region statistics.
 
 
-
+
 Table
 
 Used to communicate with a single HBase table.
 
 
-
+
 Table.CheckAndMutateBuilder
 
 A helper class for sending checkAndMutate request.
 
 
-
+
 TableBuilder
 
 For creating Table 
instance.
 
 
-
+
 TableDescriptor
 
 TableDescriptor contains the details about an HBase table 
such as the descriptors of
@@ -427,7 +433,7 @@
  when the region split should occur, coprocessors associated with it 
etc...
 
 
-
+
 ZKAsyncRegistry.Converter
  
 
@@ -817,6 +823,16 @@
 
 
 
+CoprocessorDescriptorBuilder
+
+Used to build the CoprocessorDescriptor
+
+
+
+CoprocessorDescriptorBuilder.CoprocessorDescriptorImpl
+ 
+
+
 Cursor
 
 Scan cursor to tell client where server is scanning
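
The CoprocessorDescriptor and CoprocessorDescriptorBuilder entries listed above are the structured replacement for the old "path|class|priority|args" attribute string. A sketch of how they combine with the renamed TableDescriptorBuilder.setCoprocessor; the builder method names follow the generated docs in this publish, while the jar path, class name, table, and property values are illustrative:

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class CoprocessorDescriptorSketch {
  public static TableDescriptor withObserver() throws IOException {
    // Describe the coprocessor instead of encoding it as a spec string.
    CoprocessorDescriptor cp = CoprocessorDescriptorBuilder
        .newBuilder("com.foo.FooRegionObserver")  // coprocessor class name
        .setJarPath("hdfs:///foo.jar")            // where the jar lives
        .setPriority(1001)                        // load priority
        .setProperty("arg1", "1")                 // per-coprocessor configuration
        .build();

    // Attach it with the renamed setColumnFamily/setCoprocessor builder methods.
    return TableDescriptorBuilder.newBuilder(TableName.valueOf("example_table"))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf"))
        .setCoprocessor(cp)
        .build();
  }
}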

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index 15773e2..be313d2 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -196,6 +196,8 @@
 org.apache.hadoop.hbase.client.ConnectionImplementation.ServerErrorTracker
 org.apache.hadoop.hbase.client.ConnectionImplementation.ServerErrorTracker.ServerErrors
 org.apache.hadoop.hbase.client.ConnectionUtils
+org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder
+org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder.CoprocessorDescriptorImpl
 (implements org.apache.hadoop.hbase.client.CoprocessorDescriptor)
 org.apache.hadoop.hbase.client.Cursor
 org.apache.hadoop.hbase.client.DelayingRunner (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true";
 title="class or interface in java.lang">Runnable)
 org.apache.hadoop.hbase.client.FailureInfo
@@ -500,6 +502,7 @@
 org.apache.hadoop.hbase.client.Row
 
 
+org.apache.hadoop.hbase.client.CoprocessorDescriptor
 org.apache.hadoop.hbase.client.HBaseAdmin.ProcedureFuture.WaitForStateCallable
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true";
 title="class or interface in java.lang">Iterable
 
@@ -547,24 +550,24 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable<

[39/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/book.html
--
diff --git a/book.html b/book.html
index d7b547c..da650d1 100644
--- a/book.html
+++ b/book.html
@@ -29324,8 +29324,10 @@ It was developed by Yahoo! and they run it at scale on 
their large grid cluster.
 See http://www.slideshare.net/HBaseCon/keynote-apache-hbase-at-yahoo-scale";>HBase
 at Yahoo! Scale.
 
 
-RSGroups can be defined and managed with shell commands or corresponding 
Java
-APIs. A server can be added to a group with hostname and port pair and tables
+RSGroups are defined and managed with shell commands. The shell drives a
+Coprocessor Endpoint whose API is marked private given this is an evolving
+feature; the Coprocessor API is not for public consumption.
+A server can be added to a group with a hostname and port pair, and tables
 can be moved to this group so that only regionservers in the same rsgroup can
 host the regions of the table. RegionServers and tables can only belong to one
 rsgroup at a time. By default, all tables and regionservers belong to the
@@ -37286,7 +37288,7 @@ The server will return cellblocks compressed using this 
same compressor as long
 
 
 Version 3.0.0-SNAPSHOT
-Last updated 2018-03-15 14:29:45 UTC
+Last updated 2018-03-16 14:29:41 UTC
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index e848ac1..2e76b2c 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase –  
   Bulk Loads in Apache HBase (TM)
@@ -299,7 +299,7 @@ under the License. -->
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-03-15
+  Last Published: 
2018-03-16
 
 
 



[10/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/replication/TableReplicationStorageBase.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/TableReplicationStorageBase.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/TableReplicationStorageBase.html
index ab343c1..9bafd71 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/replication/TableReplicationStorageBase.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/replication/TableReplicationStorageBase.html
@@ -90,32 +90,32 @@
 082
conf.getInt(HConstants.HBASE_META_BLOCK_SIZE, 
HConstants.DEFAULT_HBASE_META_BLOCK_SIZE);
 083return TableDescriptorBuilder
 084.newBuilder(REPLICATION_TABLE)
-085.addColumnFamily(
+085.setColumnFamily(
 086  
ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_PEER).setMaxVersions(metaMaxVersion)
 087  
.setInMemory(true).setBlocksize(metaBlockSize)
 088  
.setScope(HConstants.REPLICATION_SCOPE_LOCAL).setBloomFilterType(BloomType.NONE)
 089  .build())
-090.addColumnFamily(
+090.setColumnFamily(
 091  
ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_RS_STATE).setMaxVersions(metaMaxVersion)
 092  
.setInMemory(true).setBlocksize(metaBlockSize)
 093  
.setScope(HConstants.REPLICATION_SCOPE_LOCAL).setBloomFilterType(BloomType.NONE)
 094  .build())
-095.addColumnFamily(
+095.setColumnFamily(
 096  
ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_QUEUE).setMaxVersions(metaMaxVersion)
 097  
.setInMemory(true).setBlocksize(metaBlockSize)
 098  
.setScope(HConstants.REPLICATION_SCOPE_LOCAL).setBloomFilterType(BloomType.NONE)
 099  .build())
-100.addColumnFamily(
+100.setColumnFamily(
 101  
ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_WAL)
 102  
.setMaxVersions(HConstants.ALL_VERSIONS).setInMemory(true)
 103  
.setBlocksize(metaBlockSize).setScope(HConstants.REPLICATION_SCOPE_LOCAL)
 104  
.setBloomFilterType(BloomType.NONE).build())
-105.addColumnFamily(
+105.setColumnFamily(
 106  
ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_REGIONS).setMaxVersions(metaMaxVersion)
 107  
.setInMemory(true).setBlocksize(metaBlockSize)
 108  
.setScope(HConstants.REPLICATION_SCOPE_LOCAL).setBloomFilterType(BloomType.NONE)
 109  .build())
-110.addColumnFamily(
+110.setColumnFamily(
 111  
ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_HFILE_REFS)
 112  
.setMaxVersions(metaMaxVersion).setInMemory(true).setBlocksize(metaBlockSize)
 113  
.setScope(HConstants.REPLICATION_SCOPE_LOCAL).setBloomFilterType(BloomType.NONE)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
index eeb8705..ea9558d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.OpType.html
@@ -1136,7 +1136,7 @@
 1128
setScope(HConstants.REPLICATION_SCOPE_LOCAL).build();
 1129TableDescriptor td =
 1130
TableDescriptorBuilder.newBuilder(AccessControlLists.ACL_TABLE_NAME).
-1131addColumnFamily(cfd).build();
+1131  
setColumnFamily(cfd).build();
 1132admin.createTable(td);
 1133  }
 1134

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
index eeb8705..ea9558d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/access/AccessController.html
@@ -1136,7 +1136,7 @@
 1128
setScope(HConstants.REPLICATION_SCOPE_LOCAL).build();
 1129TableDescriptor td =
 1130
TableDescriptorBuilder.newBuilder(AccessControlLists.ACL_TABLE_NAME).
-1131addColumnFamily(cfd).build();
+1131  
setColumnFamily(cfd).build();
 1132admin.createTable(td);
 1133  }
 1134

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/

[27/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
index 067b5c4..beb9075 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionEnvironment.html
@@ -126,7 +126,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class RegionCoprocessorHost.RegionEnvironment
+private static class RegionCoprocessorHost.RegionEnvironment
 extends BaseEnvironment
 implements RegionCoprocessorEnvironment
 Encapsulation of the environment of each coprocessor
@@ -303,7 +303,7 @@ implements 
 
 region
-private Region region
+private Region region
 
 
 
@@ -312,7 +312,7 @@ implements 
 
 sharedData
-https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or interface in java.util.concurrent">ConcurrentMapString,https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object> sharedData
+https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or interface in java.util.concurrent">ConcurrentMapString,https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object> sharedData
 
 
 
@@ -321,7 +321,7 @@ implements 
 
 metricRegistry
-private final MetricRegistry metricRegistry
+private final MetricRegistry metricRegistry
 
 
 
@@ -330,7 +330,7 @@ implements 
 
 services
-private final RegionServerServices services
+private final RegionServerServices services
 
 
 
@@ -347,7 +347,7 @@ implements 
 
 RegionEnvironment
-public RegionEnvironment(RegionCoprocessor impl,
+public RegionEnvironment(RegionCoprocessor impl,
  int priority,
  int seq,
  org.apache.hadoop.conf.Configuration conf,
@@ -376,7 +376,7 @@ implements 
 
 getRegion
-public Region getRegion()
+public Region getRegion()
 
 Specified by:
 getRegion in
 interface RegionCoprocessorEnvironment
@@ -391,7 +391,7 @@ implements 
 
 getOnlineRegions
-public OnlineRegions getOnlineRegions()
+public OnlineRegions getOnlineRegions()
 
 Specified by:
 getOnlineRegions in
 interface RegionCoprocessorEnvironment
@@ -406,7 +406,7 @@ implements 
 
 getConnection
-public Connection getConnection()
+public Connection getConnection()
 Description copied from 
interface: RegionCoprocessorEnvironment
 Returns the hosts' Connection to the Cluster. Do not 
close! This is a shared connection
  with the hosting server. Throws https://docs.oracle.com/javase/8/docs/api/java/lang/UnsupportedOperationException.html?is-external=true";
 title="class or interface in 
java.lang">UnsupportedOperationException if you try to close
@@ -445,7 +445,7 @@ implements 
 
 createConnection
-public Connection createConnection(org.apache.hadoop.conf.Configuration conf)
+public Connection createConnection(org.apache.hadoop.conf.Configuration conf)
 throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Description copied from 
interface: RegionCoprocessorEnvironment
 Creates a cluster connection using the passed Configuration.
@@ -481,7 +481,7 @@ implements 
 
 getServerName
-public ServerName getServerName()
+public ServerName getServerName()
 
 Specified by:
 getServerName in
 interface RegionCoprocessorEnvironment
@@ -496,7 +496,7 @@ implements 
 
 shutdown
-public void shutdown()
+public void shutdown()
 Description copied from 
class: BaseEnvironment
 Clean up the environment
 
@@ -511,7 +511,7 @@ implements 
 
 getSharedData
-public https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or interface in java.util.concurrent">ConcurrentMapString,https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object> getSharedData()
+public https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true";
 title="class or inte

[11/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
index b99f924..2bb6cea 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.html
@@ -37,1779 +37,1734 @@
 029import java.util.UUID;
 030import 
java.util.concurrent.ConcurrentHashMap;
 031import 
java.util.concurrent.ConcurrentMap;
-032import java.util.regex.Matcher;
-033
-034import 
org.apache.commons.collections4.map.AbstractReferenceMap;
-035import 
org.apache.commons.collections4.map.ReferenceMap;
-036import 
org.apache.hadoop.conf.Configuration;
-037import org.apache.hadoop.fs.FileSystem;
-038import org.apache.hadoop.fs.Path;
-039import org.apache.hadoop.hbase.Cell;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.Coprocessor;
-042import 
org.apache.hadoop.hbase.HBaseConfiguration;
-043import 
org.apache.hadoop.hbase.HConstants;
-044import 
org.apache.hadoop.hbase.RawCellBuilder;
-045import 
org.apache.hadoop.hbase.RawCellBuilderFactory;
-046import 
org.apache.hadoop.hbase.ServerName;
-047import 
org.apache.hadoop.hbase.SharedConnection;
-048import 
org.apache.hadoop.hbase.client.Append;
-049import 
org.apache.hadoop.hbase.client.Connection;
-050import 
org.apache.hadoop.hbase.client.Delete;
-051import 
org.apache.hadoop.hbase.client.Durability;
-052import 
org.apache.hadoop.hbase.client.Get;
-053import 
org.apache.hadoop.hbase.client.Increment;
-054import 
org.apache.hadoop.hbase.client.Mutation;
-055import 
org.apache.hadoop.hbase.client.Put;
-056import 
org.apache.hadoop.hbase.client.RegionInfo;
-057import 
org.apache.hadoop.hbase.client.Result;
-058import 
org.apache.hadoop.hbase.client.Scan;
-059import 
org.apache.hadoop.hbase.client.TableDescriptor;
-060import 
org.apache.hadoop.hbase.coprocessor.BaseEnvironment;
-061import 
org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
-062import 
org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-063import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-064import 
org.apache.hadoop.hbase.coprocessor.CoprocessorService;
-065import 
org.apache.hadoop.hbase.coprocessor.CoprocessorServiceBackwardCompatiblity;
-066import 
org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
-067import 
org.apache.hadoop.hbase.coprocessor.EndpointObserver;
-068import 
org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
-069import 
org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
-070import 
org.apache.hadoop.hbase.coprocessor.ObserverContext;
-071import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-072import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-073import 
org.apache.hadoop.hbase.coprocessor.RegionObserver;
-074import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-075import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-076import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-077import 
org.apache.hadoop.hbase.io.Reference;
-078import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-079import 
org.apache.hadoop.hbase.metrics.MetricRegistry;
-080import 
org.apache.hadoop.hbase.regionserver.Region.Operation;
-081import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-082import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-083import 
org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
-084import 
org.apache.hadoop.hbase.security.User;
-085import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.CoprocessorClassLoader;
-088import 
org.apache.hadoop.hbase.util.Pair;
-089import 
org.apache.hadoop.hbase.wal.WALEdit;
-090import 
org.apache.hadoop.hbase.wal.WALKey;
-091import 
org.apache.yetus.audience.InterfaceAudience;
-092import org.slf4j.Logger;
-093import org.slf4j.LoggerFactory;
-094
-095/**
-096 * Implements the coprocessor environment 
and runtime support for coprocessors
-097 * loaded within a {@link Region}.
-098 */
-099@InterfaceAudience.Private
-100public class RegionCoprocessorHost
-101extends 
CoprocessorHost {
-102
-103  private static final Logger LOG = 
LoggerFactory.getLogger(RegionCoprocessorHost.class);
-104  // The shared data map
-105  private static final 
ReferenceMap> SHARED_DATA_MAP 
=
-106  new 
ReferenceMap<>(AbstractReferenceMap.ReferenceStrength.HARD,
-107  
AbstractReferenceMap.ReferenceStrength.WEAK);
-108
-109  // op

[22/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
index 26b9cc0..6fa64ff 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
@@ -949,394 +949,411 @@
 941*/
 942  public static final float 
HBASE_CLUSTER_MINIMUM_MEMORY_THRESHOLD = 0.2f;
 943
-944  public static final Pattern 
CP_HTD_ATTR_KEY_PATTERN =
-945  
Pattern.compile("^coprocessor\\$([0-9]+)$", Pattern.CASE_INSENSITIVE);
-946
-947  /**
-948   * 
-949   * Pattern that matches a coprocessor 
specification. Form is:
-950   * {@code  '|'  ['|'  ['|' 
]]}
-951   * where arguments are {@code 
 '='  [,...]}
-952   * For example: {@code 
hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2}
-953   * 
-954   */
-955  public static final Pattern CP_HTD_ATTR_VALUE_PATTERN =
-956      Pattern.compile("(^[^\\|]*)\\|([^\\|]+)\\|[\\s]*([\\d]*)[\\s]*(\\|.*)?$");
-957
-958  public static final String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN = "[^=,]+";
-959  public static final String CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN = "[^,]+";
-960  public static final Pattern CP_HTD_ATTR_VALUE_PARAM_PATTERN = Pattern.compile(
-961      "(" + CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN + ")=(" +
-962      CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN + "),?");
-963  public static final String CP_HTD_ATTR_INCLUSION_KEY =
-964      "hbase.coprocessor.classloader.included.classes";
-965
-966  /** The delay when re-trying a socket operation in a loop (HBASE-4712) */
-967  public static final int SOCKET_RETRY_WAIT_MS = 200;
-968
-969  /** Host name of the local machine */
-970  public static final String LOCALHOST = "localhost";
-971
-972  /**
-973   * If this parameter is set to true, then hbase will read
-974   * data and then verify checksums. Checksum verification
-975   * inside hdfs will be switched off. However, if the hbase-checksum
-976   * verification fails, then it will switch back to using
-977   * hdfs checksums for verifiying data that is being read from storage.
-978   *
-979   * If this parameter is set to false, then hbase will not
-980   * verify any checksums, instead it will depend on checksum verification
-981   * being done in the hdfs client.
-982   */
-983  public static final String HBASE_CHECKSUM_VERIFICATION =
-984      "hbase.regionserver.checksum.verify";
+944  /**
+945   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+946   */
+947  @Deprecated
+948  public static final Pattern CP_HTD_ATTR_KEY_PATTERN =
+949      Pattern.compile("^coprocessor\\$([0-9]+)$", Pattern.CASE_INSENSITIVE);
+950
+951  /**
+952   *
+953   * Pattern that matches a coprocessor 
specification. Form is:
+954   * {@code  '|'  ['|'  ['|' 
]]}
+955   * where arguments are {@code 
 '='  [,...]}
+956   * For example: {@code 
hdfs:///foo.jar|com.foo.FooRegionObserver|1001|arg1=1,arg2=2}
+957   * 
+958   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+959   */
+960  @Deprecated
+961  public static final Pattern CP_HTD_ATTR_VALUE_PATTERN =
+962      Pattern.compile("(^[^\\|]*)\\|([^\\|]+)\\|[\\s]*([\\d]*)[\\s]*(\\|.*)?$");
+963  /**
+964   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+965   */
+966  @Deprecated
+967  public static final String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN = "[^=,]+";
+968  /**
+969   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+970   */
+971  @Deprecated
+972  public static final String CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN = "[^,]+";
+973  /**
+974   * @deprecated It is used internally. As of release 2.0.0, this will be removed in HBase 3.0.0.
+975   */
+976  @Deprecated
+977  public static final Pattern CP_HTD_ATTR_VALUE_PARAM_PATTERN = Pattern.compile(
+978      "(" + CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN + ")=(" +
+979      CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN + "),?");
+980  public static final String CP_HTD_ATTR_INCLUSION_KEY =
+981      "hbase.coprocessor.classloader.included.classes";
+982
+983  /** The delay when re-trying a socket operation in a loop (HBASE-4712) */
+984  public static final int SOCKET_RETRY_WAIT_MS = 200;
 985
-986  public static final String LOCALHOST_IP = "127.0.0.1";
-987
-988  public static final String REGION_SERVER_HANDLER_COUNT = "hbase.regionserver.handler.count";
-989  public static final int DEFAULT_REGION_SERVER_HANDLER_COUNT = 30;
-990
-991  /*
-992   * REG

[12/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
index b99f924..2bb6cea 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.TableCoprocessorAttribute.html
@@ -37,1779 +37,1734 @@
 029import java.util.UUID;
 030import 
java.util.concurrent.ConcurrentHashMap;
 031import 
java.util.concurrent.ConcurrentMap;
-032import java.util.regex.Matcher;
-033
-034import 
org.apache.commons.collections4.map.AbstractReferenceMap;
-035import 
org.apache.commons.collections4.map.ReferenceMap;
-036import 
org.apache.hadoop.conf.Configuration;
-037import org.apache.hadoop.fs.FileSystem;
-038import org.apache.hadoop.fs.Path;
-039import org.apache.hadoop.hbase.Cell;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.Coprocessor;
-042import 
org.apache.hadoop.hbase.HBaseConfiguration;
-043import 
org.apache.hadoop.hbase.HConstants;
-044import 
org.apache.hadoop.hbase.RawCellBuilder;
-045import 
org.apache.hadoop.hbase.RawCellBuilderFactory;
-046import 
org.apache.hadoop.hbase.ServerName;
-047import 
org.apache.hadoop.hbase.SharedConnection;
-048import 
org.apache.hadoop.hbase.client.Append;
-049import 
org.apache.hadoop.hbase.client.Connection;
-050import 
org.apache.hadoop.hbase.client.Delete;
-051import 
org.apache.hadoop.hbase.client.Durability;
-052import 
org.apache.hadoop.hbase.client.Get;
-053import 
org.apache.hadoop.hbase.client.Increment;
-054import 
org.apache.hadoop.hbase.client.Mutation;
-055import 
org.apache.hadoop.hbase.client.Put;
-056import 
org.apache.hadoop.hbase.client.RegionInfo;
-057import 
org.apache.hadoop.hbase.client.Result;
-058import 
org.apache.hadoop.hbase.client.Scan;
-059import 
org.apache.hadoop.hbase.client.TableDescriptor;
-060import 
org.apache.hadoop.hbase.coprocessor.BaseEnvironment;
-061import 
org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
-062import 
org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-063import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-064import 
org.apache.hadoop.hbase.coprocessor.CoprocessorService;
-065import 
org.apache.hadoop.hbase.coprocessor.CoprocessorServiceBackwardCompatiblity;
-066import 
org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
-067import 
org.apache.hadoop.hbase.coprocessor.EndpointObserver;
-068import 
org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
-069import 
org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
-070import 
org.apache.hadoop.hbase.coprocessor.ObserverContext;
-071import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-072import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-073import 
org.apache.hadoop.hbase.coprocessor.RegionObserver;
-074import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-075import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-076import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-077import 
org.apache.hadoop.hbase.io.Reference;
-078import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-079import 
org.apache.hadoop.hbase.metrics.MetricRegistry;
-080import 
org.apache.hadoop.hbase.regionserver.Region.Operation;
-081import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-082import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-083import 
org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
-084import 
org.apache.hadoop.hbase.security.User;
-085import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.CoprocessorClassLoader;
-088import 
org.apache.hadoop.hbase.util.Pair;
-089import 
org.apache.hadoop.hbase.wal.WALEdit;
-090import 
org.apache.hadoop.hbase.wal.WALKey;
-091import 
org.apache.yetus.audience.InterfaceAudience;
-092import org.slf4j.Logger;
-093import org.slf4j.LoggerFactory;
-094
-095/**
-096 * Implements the coprocessor environment 
and runtime support for coprocessors
-097 * loaded within a {@link Region}.
-098 */
-099@InterfaceAudience.Private
-100public class RegionCoprocessorHost
-101extends 
CoprocessorHost {
-102
-103  private static final Logger LOG = 
LoggerFactory.getLogger(RegionCoprocessorHost.class);
-104  // The shared data map
-105  private static final 
ReferenceMap> SHARED_DATA_MAP 
=
-106  new 
Referen

[30/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html 
b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
index 7e51664..aa9427f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":9,"i6":9,"i7":10,"i8":9,"i9":9,"i10":9,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":42,"i22":42,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":9};
+var methods = 
{"i0":10,"i1":9,"i2":9,"i3":10,"i4":9,"i5":9,"i6":9,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":42,"i23":42,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":9,"i34":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class TableDescriptorBuilder
+public class TableDescriptorBuilder
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 
 Since:
@@ -167,93 +167,116 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 COMPACTION_ENABLED_KEY 
 
 
+static https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern
+CP_HTD_ATTR_KEY_PATTERN 
+
+
+private static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN 
+
+
+private static https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern
+CP_HTD_ATTR_VALUE_PARAM_PATTERN 
+
+
+private static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN 
+
+
+private static https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern
+CP_HTD_ATTR_VALUE_PATTERN
+
+ Pattern that matches a coprocessor specification.
+
+
+
 static boolean
 DEFAULT_COMPACTION_ENABLED
 Constant that denotes whether the table is compaction 
enabled by default
 
 
-
+
 private static Durability
 DEFAULT_DURABLITY
 Default durability for HTD is USE_DEFAULT, which defaults 
to HBase-global
  default value
 
 
-
+
 static long
 DEFAULT_MEMSTORE_FLUSH_SIZE
 Constant that denotes the maximum default size of the 
memstore after which
  the contents are flushed to the store files
 
 
-
+
 static boolean
 DEFAULT_NORMALIZATION_ENABLED
 Constant that denotes whether the table is normalized by 
default.
 
 
-
+
 private static int
 DEFAULT_PRIORITY
 Relative priority of the table used for rpc scheduling
 
 
-
+
 static boolean
 DEFAULT_READONLY
 Constant that denotes whether the table is READONLY by 
default and is false
 
 
-
+
 static boolean
 DEFAULT_REGION_MEMSTORE_REPLICATION 
 
-
+
 static int
 DEFAULT_REGION_REPLICATION 
 
-
+
 private static https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String>
 DEFAULT_VALUES 
 
-
+
 private TableDescriptorBuilder.ModifyableTableDescriptor
 desc 
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 DURABILITY
 Durability 
setting for the table.
 
 
-
+
 private static Bytes
 DURABILITY_KEY 
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 FLUSH_POLICY 
 
-
+
 private static Bytes
 FLUSH_POLICY_KEY 
 
-
+
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 IS_META
 Used by rest interface to access this metadata attribute
  which denotes if it is a catalog table, either  hbase:meta 
.
 
 
-
+
 priva

[48/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/org/apache/hadoop/hbase/HConstants.html
index 66fd992..e6e2279 100644
--- a/apidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/org/apache/hadoop/hbase/HConstants.html
@@ -253,25 +253,42 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern
-CP_HTD_ATTR_KEY_PATTERN 
+CP_HTD_ATTR_KEY_PATTERN
+Deprecated. 
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN 
+CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
+Deprecated. 
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern
-CP_HTD_ATTR_VALUE_PARAM_PATTERN 
+CP_HTD_ATTR_VALUE_PARAM_PATTERN
+Deprecated. 
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN 
+CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
+Deprecated. 
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
 
 
 static https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_VALUE_PATTERN
-
- Pattern that matches a coprocessor specification.
+Deprecated. 
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
 
 
 
@@ -4732,7 +4749,9 @@ public static final https://docs.oracle.com/javase/8/docs/api/java
 
 
 CP_HTD_ATTR_KEY_PATTERN
-public static final https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern CP_HTD_ATTR_KEY_PATTERN
+https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
+public static final https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern CP_HTD_ATTR_KEY_PATTERN
+Deprecated. It is used internally. As of release 2.0.0, this 
will be removed in HBase 3.0.0.
 
 
 
@@ -4741,7 +4760,9 @@ public static final https://docs.oracle.com/javase/8/docs/api/java
 
 
 CP_HTD_ATTR_VALUE_PATTERN
-public static final https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern CP_HTD_ATTR_VALUE_PATTERN
+https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
+public static final https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern CP_HTD_ATTR_VALUE_PATTERN
+Deprecated. It is used internally. As of release 2.0.0, this 
will be removed in HBase 3.0.0.
 
  Pattern that matches a coprocessor specification. Form is:
   <coprocessor jar file location> '|' <class name> ['|' <priority> ['|' <key=value,...>]]
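For illustration only, a simplified stand-in pattern (not the exact, now-internal HConstants regex) that accepts strings of the form above; the spec string itself is hypothetical:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Simplified sketch of matching a coprocessor spec: jar '|' class ['|' priority ['|' key=value,...]]
    Pattern spec = Pattern.compile("(^[^\\|]*)\\|([^\\|]+)\\|[\\s]*([\\d]*)[\\s]*(\\|.*)?$");
    Matcher m = spec.matcher("hdfs:///cp/acl.jar|org.example.AclObserver|1002|k1=v1"); // hypothetical spec
    if (m.matches()) {
      String jarPath   = m.group(1); // hdfs:///cp/acl.jar
      String className = m.group(2); // org.example.AclObserver
      String priority  = m.group(3); // 1002
    }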
@@ -4756,7 +4777,9 @@ public static final https://docs.oracle.com/javase/8/docs/api/java
 
 
 CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
-public static final https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
+https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
+public static final https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
+Deprecated. It is used internally. As of release 2.0.0, this 
will be removed in HBase 3.0.0.
 
 See Also:
 Constant
 Field Values
@@ -4769,7 +4792,9 @@ public static final https://docs.oracle.com/javase/8/docs/api/java
 
 
 CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
-public static final https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
+https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in jav

[33/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 90d9b0f..48a98fa 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -168,9 +168,9 @@
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase
-org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand
 org.apache.hadoop.hbase.backup.BackupInfo.BackupState
 org.apache.hadoop.hbase.backup.BackupType
+org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/client/Consistency.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Consistency.html 
b/devapidocs/org/apache/hadoop/hbase/client/Consistency.html
index 8371a6a..7f3f29c 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Consistency.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Consistency.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames
@@ -318,7 +318,7 @@ not permitted.)
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/client/CoprocessorDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/CoprocessorDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/client/CoprocessorDescriptor.html
new file mode 100644
index 000..4922125
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/client/CoprocessorDescriptor.html
@@ -0,0 +1,285 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+
+CoprocessorDescriptor (Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = {"i0":6,"i1":6,"i2":6,"i3":6};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev Class
+Next Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary: 
+Nested | 
+Field | 
+Constr | 
+Method
+
+
+Detail: 
+Field | 
+Constr | 
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.client
+Interface 
CoprocessorDescriptor
+
+
+
+
+
+
+All Known Implementing Classes:
+CoprocessorDescriptorBuilder.CoprocessorDescriptorImpl
+
+
+
+@InterfaceAudience.Public
+public interface CoprocessorDescriptor
+CoprocessorDescriptor contains the details about how to 
build a coprocessor.
+ This class is a pojo so there are no checks for the details carried by this 
class.
+ Use CoprocessorDescriptorBuilder 
to instantiate a CoprocessorDescriptor
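A minimal sketch of building one with the new builder, assuming the setJarPath/setPriority/setProperty setters mirror the getters listed in the method summary below (class name, jar path and property are illustrative):

    import org.apache.hadoop.hbase.Coprocessor;
    import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
    import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;

    CoprocessorDescriptor cp = CoprocessorDescriptorBuilder
        .newBuilder("org.example.MyRegionObserver")         // hypothetical observer class
        .setJarPath("hdfs:///coprocessors/my-observer.jar") // optional; default classloader if omitted
        .setPriority(Coprocessor.PRIORITY_USER)
        .setProperty("arg1", "1")
        .build();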
+
+
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All Methods Instance Methods Abstract Methods 
+
+Modifier and Type
+Method and Description
+
+
+https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+getClassName() 
+
+
+https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">OptionalString>
+getJarPath() 
+
+
+int
+getPriority() 
+
+
+https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,https://docs.oracle.com/javas

[44/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
index 15df146..31d2f97 100644
--- 
a/apidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
+++ 
b/apidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
@@ -231,35 +231,31 @@
 
 
 
-TableDescriptorBuilder
-TableDescriptorBuilder.addColumnFamily(ColumnFamilyDescriptor family) 
-
-
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 AsyncAdmin.addColumnFamily(TableName tableName,
ColumnFamilyDescriptor columnFamily)
 Add a column family to an existing table.
 
 
-
+
 void
 Admin.addColumnFamily(TableName tableName,
ColumnFamilyDescriptor columnFamily)
 Add a column family to an existing table.
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
 Admin.addColumnFamilyAsync(TableName tableName,
 ColumnFamilyDescriptor columnFamily)
 Add a column family to an existing table.
 
 
-
+
 static ColumnFamilyDescriptor
 ColumnFamilyDescriptorBuilder.copy(ColumnFamilyDescriptor desc) 
 
-
+
 default void
 Admin.modifyColumn(TableName tableName,
 ColumnFamilyDescriptor columnFamily)
@@ -270,35 +266,39 @@
 
 
 
-
+
 TableDescriptorBuilder
 TableDescriptorBuilder.modifyColumnFamily(ColumnFamilyDescriptor family) 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureVoid>
 AsyncAdmin.modifyColumnFamily(TableName tableName,
   ColumnFamilyDescriptor columnFamily)
 Modify an existing column family on a table.
 
 
-
+
 void
 Admin.modifyColumnFamily(TableName tableName,
   ColumnFamilyDescriptor columnFamily)
 Modify an existing column family on a table.
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true";
 title="class or interface in java.util.concurrent">FutureVoid>
 Admin.modifyColumnFamilyAsync(TableName tableName,
ColumnFamilyDescriptor columnFamily)
 Modify an existing column family on a table.
 
 
-
+
 static ColumnFamilyDescriptorBuilder
 ColumnFamilyDescriptorBuilder.newBuilder(ColumnFamilyDescriptor desc) 
 
+
+TableDescriptorBuilder
+TableDescriptorBuilder.setColumnFamily(ColumnFamilyDescriptor family) 
+
 
 static byte[]
 ColumnFamilyDescriptorBuilder.toByteArray(ColumnFamilyDescriptor desc) 
@@ -316,6 +316,10 @@
 static https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true";
 title="class or interface in java.util">Comparator
 TableDescriptor.getComparator(https://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true";
 title="class or interface in java.util">Comparator cfComparator) 
 
+
+TableDescriptorBuilder
+TableDescriptorBuilder.setColumnFamilies(https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection families) 
+
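In this class-use listing addColumnFamily has been replaced by setColumnFamily/setColumnFamilies; a short sketch of declaring a table with the renamed methods (table and family names are illustrative):

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

    TableDescriptor td = TableDescriptorBuilder
        .newBuilder(TableName.valueOf("example_table"))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")) // replaces the removed addColumnFamily(...)
        .build();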
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apidocs/org/apache/hadoop/hbase/client/class-use/CoprocessorDescriptor.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/class-use/CoprocessorDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/CoprocessorDescriptor.html
new file mode 100644
index 000..ab7ecfd
--- /dev/null
+++ 
b/apidocs/org/apache/hadoop/hbase/client/class-use/CoprocessorDescriptor.html
@@ -0,0 +1,236 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+
+Uses of Interface org.apache.hadoop.hbase.client.CoprocessorDescriptor 
(Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
-var methods = 
{"i0":42,"i1":42,"i2":42,"i3":42,"i4":42,"i5":42,"i6":42,"i7":42,"i8":42,"i9":42,"i10":42,"i11":42,"i12":42,"i13":42,"i14":42,"i15":42,"i16":42,"i17":42,"i18":42,"i19":42,"i20":42,"i21":42,"i22":42,"i23":42,"i24":42,"i25":42,"i26":42,"i27":42,"i28":42,"i29":42,"i30":42,"i31":42,"i32":42,"i33":42,"i34":42,"i35":42,"i36":42,"i37":42,"i38":42,"i39":42,"i40":42,"i41":42,"i42":42,"i43":42,"i44":41,"i45":42,"i46":42,"i47":42,"i48":42,"i49":42,"i50":42,"i51":42,"i52":42,"i53":42,"i54":42,"i55":42,"i56":42,"i57":42,"i58":42,"i59":42,"i60":42,"i61":42,"i62":42,"i63":42,"i64":42,"i65":42,"i66":42,"i67":42,"i68":42,"i69":42,"i70":42,"i71":42,"i72":42,"i73":42};
+var methods = 
{"i0":42,"i1":42,"i2":42,"i3":42,"i4":42,"i5":42,"i6":42,"i7":42,"i8":42,"i9":42,"i10":42,"i11":42,"i12":42,"i13":42,"i14":42,"i15":42,"i16":42,"i17":42,"i18":42,"i19":42,"i20":42,"i21":42,"i22":42,"i23":42,"i24":42,"i25":42,"i26":42,"i27":42,"i28":42,"i29":42,"i30":42,"i31":42,"i32":42,"i33":42,"i34":42,"i35":42,"i36":42,"i37":42,"i38":42,"i39":42,"i40":42,"i41":42,"i42":42,"i43":42,"i44":42,"i45":41,"i46":42,"i47":42,"i48":42,"i49":42,"i50":42,"i51":42,"i52":42,"i53":42,"i54":42,"i55":42,"i56":42,"i57":42,"i58":42,"i59":42,"i60":42,"i61":42,"i62":42,"i63":42,"i64":42,"i65":42,"i66":42,"i67":42,"i68":42,"i69":42,"i70":42,"i71":42,"i72":42,"i73":42,"i74":42};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
  @InterfaceAudience.Public
-public class HTableDescriptor
+public class HTableDescriptor
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements TableDescriptor, https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable
 HTableDescriptor contains the details about an HBase table  
such as the descriptors of
@@ -472,26 +472,33 @@ implements 
+https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
+getCoprocessorDescriptors()
+Deprecated. 
+Return the list of attached co-processor represented
+
+
+
 https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString>
 getCoprocessors()
 Deprecated. 
 Return the list of attached co-processor represented by 
their name className
 
 
-
+
 protected TableDescriptorBuilder.ModifyableTableDescriptor
 getDelegateeForModification()
 Deprecated. 
  
 
-
+
 Durability
 getDurability()
 Deprecated. 
 Returns the durability setting for the table.
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/util/Collection.html?is-external=true";
 title="class or interface in java.util">Collection
 getFamilies()
 Deprecated. 
@@ -499,7 +506,7 @@ implements 
+
 https://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true";
 title="class or interface in java.util">Set
 getFamiliesKeys()
 Deprecated. 
@@ -509,7 +516,7 @@ implements 
+
 HColumnDescriptor
 getFamily(byte[] column)
 Deprecated. 
@@ -517,7 +524,7 @@ implements 
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 getFlushPolicyClassName()
 Deprecated. 
@@ -525,7 +532,7 @@ implements 
+
 long
 getMaxFileSize()
 Deprecated. 
@@ -533,40 +540,40 @@ implements 
+
 long
 getMemStoreFlushSize()
 Deprecated. 
 Returns the size of the memstore after which a flush to 
filesystem is triggered.
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 getNameAsString()
 Deprecated. 
 Get the name of the table as a String
 
 
-
+
 https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 getOwnerString()
 Depre

[13/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionObserverOperationWithoutResult.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionObserverOperationWithoutResult.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionObserverOperationWithoutResult.html
index b99f924..2bb6cea 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionObserverOperationWithoutResult.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.RegionObserverOperationWithoutResult.html
@@ -37,1779 +37,1734 @@
 029import java.util.UUID;
 030import 
java.util.concurrent.ConcurrentHashMap;
 031import 
java.util.concurrent.ConcurrentMap;
-032import java.util.regex.Matcher;
-033
-034import 
org.apache.commons.collections4.map.AbstractReferenceMap;
-035import 
org.apache.commons.collections4.map.ReferenceMap;
-036import 
org.apache.hadoop.conf.Configuration;
-037import org.apache.hadoop.fs.FileSystem;
-038import org.apache.hadoop.fs.Path;
-039import org.apache.hadoop.hbase.Cell;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.Coprocessor;
-042import 
org.apache.hadoop.hbase.HBaseConfiguration;
-043import 
org.apache.hadoop.hbase.HConstants;
-044import 
org.apache.hadoop.hbase.RawCellBuilder;
-045import 
org.apache.hadoop.hbase.RawCellBuilderFactory;
-046import 
org.apache.hadoop.hbase.ServerName;
-047import 
org.apache.hadoop.hbase.SharedConnection;
-048import 
org.apache.hadoop.hbase.client.Append;
-049import 
org.apache.hadoop.hbase.client.Connection;
-050import 
org.apache.hadoop.hbase.client.Delete;
-051import 
org.apache.hadoop.hbase.client.Durability;
-052import 
org.apache.hadoop.hbase.client.Get;
-053import 
org.apache.hadoop.hbase.client.Increment;
-054import 
org.apache.hadoop.hbase.client.Mutation;
-055import 
org.apache.hadoop.hbase.client.Put;
-056import 
org.apache.hadoop.hbase.client.RegionInfo;
-057import 
org.apache.hadoop.hbase.client.Result;
-058import 
org.apache.hadoop.hbase.client.Scan;
-059import 
org.apache.hadoop.hbase.client.TableDescriptor;
-060import 
org.apache.hadoop.hbase.coprocessor.BaseEnvironment;
-061import 
org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
-062import 
org.apache.hadoop.hbase.coprocessor.CoprocessorException;
-063import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-064import 
org.apache.hadoop.hbase.coprocessor.CoprocessorService;
-065import 
org.apache.hadoop.hbase.coprocessor.CoprocessorServiceBackwardCompatiblity;
-066import 
org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
-067import 
org.apache.hadoop.hbase.coprocessor.EndpointObserver;
-068import 
org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
-069import 
org.apache.hadoop.hbase.coprocessor.MetricsCoprocessor;
-070import 
org.apache.hadoop.hbase.coprocessor.ObserverContext;
-071import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
-072import 
org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
-073import 
org.apache.hadoop.hbase.coprocessor.RegionObserver;
-074import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-075import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-076import 
org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
-077import 
org.apache.hadoop.hbase.io.Reference;
-078import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-079import 
org.apache.hadoop.hbase.metrics.MetricRegistry;
-080import 
org.apache.hadoop.hbase.regionserver.Region.Operation;
-081import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
-082import 
org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
-083import 
org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
-084import 
org.apache.hadoop.hbase.security.User;
-085import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-086import 
org.apache.hadoop.hbase.util.Bytes;
-087import 
org.apache.hadoop.hbase.util.CoprocessorClassLoader;
-088import 
org.apache.hadoop.hbase.util.Pair;
-089import 
org.apache.hadoop.hbase.wal.WALEdit;
-090import 
org.apache.hadoop.hbase.wal.WALKey;
-091import 
org.apache.yetus.audience.InterfaceAudience;
-092import org.slf4j.Logger;
-093import org.slf4j.LoggerFactory;
-094
-095/**
-096 * Implements the coprocessor environment 
and runtime support for coprocessors
-097 * loaded within a {@link Region}.
-098 */
-099@InterfaceAudience.Private
-100public class RegionCoprocessorHost
-101extends 
CoprocessorHost {
-102
-103  private static final Logger LOG = 
LoggerFactory.getLogger(RegionCoprocessorHost.class);
-104  // The shared data map
-105  private static final 
ReferenceMap

[37/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 6fdf162..5bd8a2e 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 ©2007 - 2018 The Apache Software Foundation
 
-  File: 3586,
- Errors: 16104,
+  File: 3589,
+ Errors: 16093,
  Warnings: 0,
  Infos: 0
   
@@ -6173,7 +6173,7 @@ under the License.
   0
 
 
-  2
+  1
 
   
   
@@ -12599,7 +12599,7 @@ under the License.
   0
 
 
-  8
+  7
 
   
   
@@ -15992,6 +15992,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder.java";>org/apache/hadoop/hbase/client/CoprocessorDescriptorBuilder.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.chaos.factories.CalmMonkeyFactory.java";>org/apache/hadoop/hbase/chaos/factories/CalmMonkeyFactory.java
 
 
@@ -28559,7 +28573,7 @@ under the License.
   0
 
 
-  80
+  79
 
   
   
@@ -33100,6 +33114,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.TestCoprocessorDescriptor.java";>org/apache/hadoop/hbase/client/TestCoprocessorDescriptor.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.http.jmx.package-info.java";>org/apache/hadoop/hbase/http/jmx/package-info.java
 
 
@@ -38322,6 +38350,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.CoprocessorDescriptor.java";>org/apache/hadoop/hbase/client/CoprocessorDescriptor.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.rest.DummyFilter.java";>org/apache/hadoop/hbase/rest/DummyFilter.java
 
 
@@ -46843,7 +46885,7 @@ under the License.
   0
 
 
-  40
+  32
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/coc.html
--
diff --git a/coc.html b/coc.html
index 2eea0d2..f39458c 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – 
   Code of Conduct Policy
@@ -368,7 +368,7 @@ email to mailto:priv...@hbase.apache.org";>the priv
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-03-15
+  Last Published: 
2018-03-16
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 04ceb00..50d0ab4 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -667,7 +667,7 @@ Now your HBase server is running, start 
coding and build that next
 https://www.apache.org/";>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-03-15
+  Last Published: 
2018-03-16
 

[09/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/util/FSTableDescriptors.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSTableDescriptors.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSTableDescriptors.html
index 7226d18..f065ddb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/util/FSTableDescriptors.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/util/FSTableDescriptors.html
@@ -45,733 +45,736 @@
 037import org.apache.hadoop.fs.FileSystem;
 038import org.apache.hadoop.fs.Path;
 039import org.apache.hadoop.fs.PathFilter;
-040import 
org.apache.yetus.audience.InterfaceAudience;
-041import org.slf4j.Logger;
-042import org.slf4j.LoggerFactory;
-043import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-044import 
org.apache.hadoop.hbase.client.TableDescriptor;
-045import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-046import 
org.apache.hadoop.hbase.Coprocessor;
-047import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-048import 
org.apache.hadoop.hbase.HConstants;
-049import 
org.apache.hadoop.hbase.regionserver.BloomType;
-050import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-051import 
org.apache.hbase.thirdparty.com.google.common.primitives.Ints;
-052import 
org.apache.hadoop.hbase.TableDescriptors;
-053import 
org.apache.hadoop.hbase.TableInfoMissingException;
-054import 
org.apache.hadoop.hbase.TableName;
-055
-056/**
-057 * Implementation of {@link 
TableDescriptors} that reads descriptors from the
-058 * passed filesystem.  It expects 
descriptors to be in a file in the
-059 * {@link #TABLEINFO_DIR} subdir of the 
table's directory in FS.  Can be read-only
-060 *  -- i.e. does not modify the 
filesystem or can be read and write.
-061 *
-062 * 

Also has utility for keeping up the table descriptors tableinfo file. -063 * The table schema file is kept in the {@link #TABLEINFO_DIR} subdir -064 * of the table directory in the filesystem. -065 * It has a {@link #TABLEINFO_FILE_PREFIX} and then a suffix that is the -066 * edit sequenceid: e.g. .tableinfo.03. This sequenceid -067 * is always increasing. It starts at zero. The table schema file with the -068 * highest sequenceid has the most recent schema edit. Usually there is one file -069 * only, the most recent but there may be short periods where there are more -070 * than one file. Old files are eventually cleaned. Presumption is that there -071 * will not be lots of concurrent clients making table schema edits. If so, -072 * the below needs a bit of a reworking and perhaps some supporting api in hdfs. -073 */ -074@InterfaceAudience.Private -075public class FSTableDescriptors implements TableDescriptors { -076 private static final Logger LOG = LoggerFactory.getLogger(FSTableDescriptors.class); -077 private final FileSystem fs; -078 private final Path rootdir; -079 private final boolean fsreadonly; -080 private volatile boolean usecache; -081 private volatile boolean fsvisited; -082 -083 @VisibleForTesting -084 long cachehits = 0; +040import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder; +041import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint; +042import org.apache.yetus.audience.InterfaceAudience; +043import org.slf4j.Logger; +044import org.slf4j.LoggerFactory; +045import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; +046import org.apache.hadoop.hbase.client.TableDescriptor; +047import org.apache.hadoop.hbase.client.TableDescriptorBuilder; +048import org.apache.hadoop.hbase.Coprocessor; +049import org.apache.hadoop.hbase.exceptions.DeserializationException; +050import org.apache.hadoop.hbase.HConstants; +051import org.apache.hadoop.hbase.regionserver.BloomType; +052import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting; +053import org.apache.hbase.thirdparty.com.google.common.primitives.Ints; +054import org.apache.hadoop.hbase.TableDescriptors; +055import org.apache.hadoop.hbase.TableInfoMissingException; +056import org.apache.hadoop.hbase.TableName; +057 +058/** +059 * Implementation of {@link TableDescriptors} that reads descriptors from the +060 * passed filesystem. It expects descriptors to be in a file in the +061 * {@link #TABLEINFO_DIR} subdir of the table's directory in FS. Can be read-only +062 * -- i.e. does not modify the filesystem or can be read and write. +063 * +064 *

Also has utility for keeping up the table descriptors tableinfo file. +065 * The table schema file is kept in the {@link #TABLEINFO_DIR} subdir +066 * of the table directory in the filesystem. +067 * It has a {@link #TABLEINFO_FILE_PREFIX} and then a suffix that is the +068 * edit sequenceid: e.g. .tableinfo.03. This sequenceid +069 * is always increa


[24/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index 0d8d461..0df1ff1 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -532,14 +532,14 @@
 
 java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.util.PrettyPrinter.Unit
 org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE
 org.apache.hadoop.hbase.util.Order
-org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType
 org.apache.hadoop.hbase.util.ChecksumType
-org.apache.hadoop.hbase.util.PoolMap.PoolType
-org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.PureJavaComparer
 (implements org.apache.hadoop.hbase.util.Bytes.Comparer)
 org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.UnsafeComparer
 (implements org.apache.hadoop.hbase.util.Bytes.Comparer)
+org.apache.hadoop.hbase.util.IdReadWriteLock.ReferenceType
+org.apache.hadoop.hbase.util.Bytes.LexicographicalComparerHolder.PureJavaComparer
 (implements org.apache.hadoop.hbase.util.Bytes.Comparer)
+org.apache.hadoop.hbase.util.PrettyPrinter.Unit
+org.apache.hadoop.hbase.util.PoolMap.PoolType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/overview-tree.html
--
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index 2e76ee9..a9fb4aa 100644
--- a/devapidocs/overview-tree.html
+++ b/devapidocs/overview-tree.html
@@ -1347,6 +1347,8 @@
 org.apache.hadoop.hbase.io.crypto.Encryption.Context
 
 
+org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder
+org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder.CoprocessorDescriptorImpl
 (implements org.apache.hadoop.hbase.client.CoprocessorDescriptor)
 org.apache.hadoop.hbase.coprocessor.CoprocessorHost
 
 org.apache.hadoop.hbase.master.MasterCoprocessorHost
@@ -4684,6 +4686,7 @@
 org.apache.hadoop.hbase.coprocessor.WALCoprocessor
 
 
+org.apache.hadoop.hbase.client.CoprocessorDescriptor
 org.apache.hadoop.hbase.CoprocessorEnvironment
 
 org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment



[46/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apidocs/org/apache/hadoop/hbase/client/Consistency.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Consistency.html 
b/apidocs/org/apache/hadoop/hbase/client/Consistency.html
index c645bcf..b3be61d 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Consistency.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Consistency.html
@@ -50,7 +50,7 @@ var activeTableTab = "activeTableTab";
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames
@@ -318,7 +318,7 @@ not permitted.)
 
 
 Prev Class
-Next Class
+Next Class
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apidocs/org/apache/hadoop/hbase/client/CoprocessorDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/CoprocessorDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/client/CoprocessorDescriptor.html
new file mode 100644
index 000..4baf4e3
--- /dev/null
+++ b/apidocs/org/apache/hadoop/hbase/client/CoprocessorDescriptor.html
@@ -0,0 +1,281 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+
+
+
+CoprocessorDescriptor (Apache HBase 3.0.0-SNAPSHOT API)
+
+
+
+
+
+var methods = {"i0":6,"i1":6,"i2":6,"i3":6};
+var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
+var altColor = "altColor";
+var rowColor = "rowColor";
+var tableTab = "tableTab";
+var activeTableTab = "activeTableTab";
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+Skip navigation links
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev Class
+Next Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary: 
+Nested | 
+Field | 
+Constr | 
+Method
+
+
+Detail: 
+Field | 
+Constr | 
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase.client
+Interface 
CoprocessorDescriptor
+
+
+
+
+
+
+
+@InterfaceAudience.Public
+public interface CoprocessorDescriptor
+CoprocessorDescriptor contains the details about how to 
build a coprocessor.
+ This class is a pojo so there are no checks for the details carried by this 
class.
+ Use CoprocessorDescriptorBuilder 
to instantiate a CoprocessorDescriptor
+
+
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All Methods Instance Methods Abstract Methods 
+
+Modifier and Type
+Method and Description
+
+
+https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+getClassName() 
+
+
+https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">OptionalString>
+getJarPath() 
+
+
+int
+getPriority() 
+
+
+https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String>
+getProperties() 
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Method Detail
+
+
+
+
+
+getClassName
+https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String getClassName()
+
+Returns:
+the name of the class or interface represented by this object.
+
+
+
+
+
+
+
+
+getJarPath
+https://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true";
 title="class or interface in java.util">OptionalString> getJarPath()
+
+Returns:
+Path of the jar file. If it's null, the class will be loaded from default 
classloader.
+
+
+
+
+
+
+
+
+getPriority
+int getPriority()
+
+Returns:
+The order to execute this coprocessor
+
+
+
+
+
+
+
+
+getProperties
+https://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true";
 title="class or interface in java.util">MapString,https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String> getProperties()
+
+Returns:
+Arbitrary key-val
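A rough sketch of consuming these accessors from a table descriptor, assuming getCoprocessorDescriptors() is exposed on TableDescriptor as shown for HTableDescriptor elsewhere in this commit and that td comes from, e.g., Admin.getDescriptor(tableName):

    import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptor;

    // Print every attached coprocessor with its priority and (optional) jar location.
    static void dumpCoprocessors(TableDescriptor td) {
      for (CoprocessorDescriptor cp : td.getCoprocessorDescriptors()) {
        System.out.println(cp.getClassName()
            + " priority=" + cp.getPriority()
            + " jar=" + cp.getJarPath().orElse("<default classloader>"));
      }
    }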

[50/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 7f9e649..bd9bc14 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20180315144658+00'00')
-/CreationDate (D:20180315144658+00'00')
+/ModDate (D:20180316144642+00'00')
+/CreationDate (D:20180316144642+00'00')
 >>
 endobj
 2 0 obj
@@ -27896,7 +27896,7 @@ endobj
 endobj
 136 0 obj
 << /Limits [(__anchor-top) (adding.new.node)]
-/Names [(__anchor-top) 25 0 R (__indexterm-7407378) 3454 0 R 
(__indexterm-7409782) 3456 0 R (__indexterm-7411322) 3458 0 R 
(__indexterm-7413506) 3459 0 R (acid) 916 0 R (acl) 3275 0 R 
(add-metric-name-and-function-to-hadoop-compat-interface) 3557 0 R 
(add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3559 0 R 
(add.metrics) 3555 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 3797 
0 R (adding.new.node) 3024 0 R]
+/Names [(__anchor-top) 25 0 R (__indexterm-7408246) 3454 0 R 
(__indexterm-7410650) 3456 0 R (__indexterm-7412190) 3458 0 R 
(__indexterm-7414374) 3459 0 R (acid) 916 0 R (acl) 3275 0 R 
(add-metric-name-and-function-to-hadoop-compat-interface) 3557 0 R 
(add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3559 0 R 
(add.metrics) 3555 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 3797 
0 R (adding.new.node) 3024 0 R]
 >>
 endobj
 137 0 obj
@@ -633359,7 +633359,7 @@ endobj
 [3260 0 R /XYZ 0 841.89 null]
 endobj
 3262 0 obj
-<< /Length 15625
+<< /Length 16145
 >>
 stream
 q
@@ -633506,12 +633506,12 @@ ET
 0.2 0.2 0.2 scn
 0.2 0.2 0.2 SCN
 
-0.5361 Tw
+0.335 Tw
 
 BT
 48.24 662.306 Td
 /F1.0 10.5 Tf
-<525347726f7570732063616e20626520646566696e656420616e64206d616e616765642077697468207368656c6c20636f6d6d616e6473206f7220636f72726573706f6e64696e67204a61766120415049732e204120736572766572>
 Tj
+<525347726f7570732061726520646566696e656420616e64206d616e616765642077697468207368656c6c20636f6d6d616e64732e20546865207368656c6c20647269766573206120436f70726f636573736f7220456e64706f696e74>
 Tj
 ET
 
 
@@ -633521,12 +633521,12 @@ ET
 0.2 0.2 0.2 scn
 0.2 0.2 0.2 SCN
 
-0.1941 Tw
+0.3309 Tw
 
 BT
 48.24 646.526 Td
 /F1.0 10.5 Tf
-<63616e20626520616464656420746f20612067726f7570207769746820686f73746e616d6520616e6420706f7274207061697220616e64207461626c65732063616e206265206d6f76656420746f20746869732067726f757020736f2074686174>
 Tj
+[<77686f736520415049206973206d61726b> 20.0195 
<6564207072697661746520676976656e207468697320697320616e2065766f6c76696e6720666561747572653b2074686520436f70726f636573736f7220415049206973206e6f7420666f72207075626c6963>]
 TJ
 ET
 
 
@@ -633536,12 +633536,12 @@ ET
 0.2 0.2 0.2 scn
 0.2 0.2 0.2 SCN
 
-0.6004 Tw
+1.6026 Tw
 
 BT
 48.24 630.746 Td
 /F1.0 10.5 Tf
-<6f6e6c7920726567696f6e7365727665727320696e207468652073616d6520727367726f75702063616e20686f73742074686520726567696f6e73206f6620746865207461626c652e20526567696f6e5365727665727320616e64207461626c6573>
 Tj
+<636f6e73756d7074696f6e2e2041207365727665722063616e20626520616464656420746f20612067726f7570207769746820686f73746e616d6520616e6420706f7274207061697220616e64207461626c65732063616e206265>
 Tj
 ET
 
 
@@ -633551,12 +633551,42 @@ ET
 0.2 0.2 0.2 scn
 0.2 0.2 0.2 SCN
 
-1.7781 Tw
+1.7076 Tw
 
 BT
 48.24 614.966 Td
 /F1.0 10.5 Tf
-[<63616e206f6e6c792062656c6f6e6720746f206f6e6520727367726f757020617420612074696d652e2042>
 20.0195 
<792064656661756c742c20616c6c207461626c657320616e6420726567696f6e736572766572732062656c6f6e6720746f20746865>]
 TJ
+<6d6f76656420746f20746869732067726f757020736f2074686174206f6e6c7920726567696f6e7365727665727320696e207468652073616d6520727367726f75702063616e20686f73742074686520726567696f6e73206f6620746865>
 Tj
+ET
+
+
+0.0 Tw
+0.0 0.0 0.0 SCN
+0.0 0.0 0.0 scn
+0.2 0.2 0.2 scn
+0.2 0.2 0.2 SCN
+
+0.6212 Tw
+
+BT
+48.24 599.186 Td
+/F1.0 10.5 Tf
+[<7461626c652e20526567696f6e5365727665727320616e64207461626c65732063616e206f6e6c792062656c6f6e6720746f206f6e6520727367726f757020617420612074696d652e2042>
 20.0195 <792064656661756c742c20616c6c207461626c657320616e64>] TJ
+ET
+
+
+0.0 Tw
+0.0 0.0 0.0 SCN
+0.0 0.0 0.0 scn
+0.2 0.2 0.2 scn
+0.2 0.2 0.2 SCN
+
+0.4947 Tw
+
+BT
+48.24 583.406 Td
+/F1.0 10.5 Tf
+<726567696f6e736572766572732062656c6f6e6720746f2074686520> Tj
 ET
 
 
@@ -633566,10 +633596,10 @@ ET
 0.6941 0.1294 0.2745 scn
 0.6941 0.1294 0.2745 SCN
 
-1.9338 Tw
+0.4947 Tw
 
 BT
-48.24 599.186 Td
+189.6797 583.406 Td
 /F4.0 10.5 Tf
 <64656661756c74> Tj
 ET
@@ -633581,12 +633611,12 @@ ET
 0.2 0.2 0.2 scn
 0.2 0.2 0.2 SCN
 
-1.9338 Tw
+0.4947 Tw
 
 BT
-84.99 599.186 Td
+226.4297 583.406 Td
 /F1.0 10.5 Tf
-[<20727367726f75702e2053> 20.0195 
<797374656d207461626c65732063616e20616c736f2062652070757420696e746f206120727367726

[19/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/devapidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
index 05c0542..2d09bf8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
@@ -35,1393 +35,1419 @@
 027import java.util.HashSet;
 028import java.util.List;
 029import java.util.Map;
-030import java.util.Set;
-031import java.util.TreeMap;
-032import java.util.TreeSet;
-033import java.util.function.Function;
-034import java.util.regex.Matcher;
-035import org.apache.hadoop.fs.Path;
-036import 
org.apache.hadoop.hbase.Coprocessor;
-037import 
org.apache.hadoop.hbase.HConstants;
-038import 
org.apache.hadoop.hbase.TableName;
-039import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-040import 
org.apache.hadoop.hbase.security.User;
-041import 
org.apache.hadoop.hbase.util.Bytes;
-042import 
org.apache.yetus.audience.InterfaceAudience;
-043import org.slf4j.Logger;
-044import org.slf4j.LoggerFactory;
-045
-046import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-047import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
-048
-049/**
-050 * @since 2.0.0
-051 */
-052@InterfaceAudience.Public
-053public class TableDescriptorBuilder {
-054  public static final Logger LOG = 
LoggerFactory.getLogger(TableDescriptorBuilder.class);
-055  @InterfaceAudience.Private
-056  public static final String SPLIT_POLICY 
= "SPLIT_POLICY";
-057  private static final Bytes 
SPLIT_POLICY_KEY = new Bytes(Bytes.toBytes(SPLIT_POLICY));
-058  /**
-059   * Used by HBase Shell interface to 
access this metadata
-060   * attribute which denotes the maximum 
size of the store file after which a
-061   * region split occurs.
-062   */
-063  @InterfaceAudience.Private
-064  public static final String MAX_FILESIZE 
= "MAX_FILESIZE";
-065  private static final Bytes 
MAX_FILESIZE_KEY
-066  = new 
Bytes(Bytes.toBytes(MAX_FILESIZE));
-067
-068  @InterfaceAudience.Private
-069  public static final String OWNER = 
"OWNER";
+030import java.util.Objects;
+031import java.util.Optional;
+032import java.util.Set;
+033import java.util.TreeMap;
+034import java.util.TreeSet;
+035import java.util.function.Function;
+036import java.util.regex.Matcher;
+037import java.util.regex.Pattern;
+038import 
org.apache.hadoop.hbase.Coprocessor;
+039import 
org.apache.hadoop.hbase.HConstants;
+040import 
org.apache.hadoop.hbase.TableName;
+041import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+042import 
org.apache.hadoop.hbase.security.User;
+043import 
org.apache.hadoop.hbase.util.Bytes;
+044import 
org.apache.yetus.audience.InterfaceAudience;
+045import org.slf4j.Logger;
+046import org.slf4j.LoggerFactory;
+047
+048import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+049import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
+050
+051/**
+052 * @since 2.0.0
+053 */
+054@InterfaceAudience.Public
+055public class TableDescriptorBuilder {
+056  public static final Logger LOG = 
LoggerFactory.getLogger(TableDescriptorBuilder.class);
+057  @InterfaceAudience.Private
+058  public static final String SPLIT_POLICY 
= "SPLIT_POLICY";
+059  private static final Bytes 
SPLIT_POLICY_KEY = new Bytes(Bytes.toBytes(SPLIT_POLICY));
+060  /**
+061   * Used by HBase Shell interface to 
access this metadata
+062   * attribute which denotes the maximum 
size of the store file after which a
+063   * region split occurs.
+064   */
+065  @InterfaceAudience.Private
+066  public static final String MAX_FILESIZE 
= "MAX_FILESIZE";
+067  private static final Bytes 
MAX_FILESIZE_KEY
+068  = new 
Bytes(Bytes.toBytes(MAX_FILESIZE));
+069
 070  @InterfaceAudience.Private
-071  public static final Bytes OWNER_KEY
-072  = new 
Bytes(Bytes.toBytes(OWNER));
-073
-074  /**
-075   * Used by rest interface to access 
this metadata attribute
-076   * which denotes if the table is Read 
Only.
-077   */
-078  @InterfaceAudience.Private
-079  public static final String READONLY = 
"READONLY";
-080  private static final Bytes 
READONLY_KEY
-081  = new 
Bytes(Bytes.toBytes(READONLY));
-082
-083  /**
-084   * Used by HBase Shell interface to 
access this metadata
-085   * attribute which denotes if the table 
is compaction enabled.
-086   */
-087  @InterfaceAudience.Private
-088  public static final String 
COMPACTION_ENABLED = "COMPACTION_ENABLED";
-089  private static final Bytes 
COMPACTION_ENABLED_KEY
-090  = new 
Bytes(Bytes.toBytes(COMPACTION_ENABLED))

[49/51] [partial] hbase-site git commit: Published site at 22f4def942f8a3367d0ca6598317e9b9a7d0cfcd.

2018-03-16 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apidocs/deprecated-list.html
--
diff --git a/apidocs/deprecated-list.html b/apidocs/deprecated-list.html
index c82f999..fb294b7 100644
--- a/apidocs/deprecated-list.html
+++ b/apidocs/deprecated-list.html
@@ -208,39 +208,64 @@
 
 
 
+org.apache.hadoop.hbase.HConstants.CP_HTD_ATTR_KEY_PATTERN
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
+
+org.apache.hadoop.hbase.HConstants.CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
+
+org.apache.hadoop.hbase.HConstants.CP_HTD_ATTR_VALUE_PARAM_PATTERN
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
+
+org.apache.hadoop.hbase.HConstants.CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
+
+org.apache.hadoop.hbase.HConstants.CP_HTD_ATTR_VALUE_PATTERN
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
+
 org.apache.hadoop.hbase.ServerLoad.EMPTY_SERVERLOAD
 As of release 2.0.0, this 
will be removed in HBase 3.0.0
  Use ServerMetricsBuilder.of(ServerName) 
instead.
 
 
-
+
 org.apache.hadoop.hbase.mapreduce.SimpleTotalOrderPartitioner.END
 
-
+
 org.apache.hadoop.hbase.HConstants.HBASE_REGIONSERVER_LEASE_PERIOD_KEY
 This config option is 
deprecated. Will be removed at later releases after 0.96.
 
 
-
+
 org.apache.hadoop.hbase.mapreduce.TableSplit.LOG
 LOG variable would be made 
private. fix in hbase 3.0
 
 
-
+
 org.apache.hadoop.hbase.HConstants.OLDEST_TIMESTAMP
 Should not be public since 
hbase-1.3.0. For internal use only. Move internal to
Scanners flagged as special timestamp value never to be returned as 
timestamp on a Cell.
 
 
-
+
 org.apache.hadoop.hbase.client.Scan.SCAN_ATTRIBUTES_METRICS_DATA
 
-
+
 org.apache.hadoop.hbase.client.Scan.SCAN_ATTRIBUTES_METRICS_ENABLE
 since 1.0.0. Use Scan.setScanMetricsEnabled(boolean)
 
 
-
+
 org.apache.hadoop.hbase.mapreduce.SimpleTotalOrderPartitioner.START
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/8b1eaec1/apidocs/index-all.html
--
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index b931886..d15240e 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
@@ -244,8 +244,6 @@
 
 Add a column family to an existing table.
 
-addColumnFamily(ColumnFamilyDescriptor)
 - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder
- 
 addColumnFamilyAsync(TableName,
 ColumnFamilyDescriptor) - Method in interface 
org.apache.hadoop.hbase.client.Admin
 
 Add a column family to an existing table.
@@ -267,10 +265,6 @@
  
 addConfiguration(String,
 String) - Method in class org.apache.hadoop.hbase.NamespaceDescriptor.Builder
  
-addCoprocessor(String)
 - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder
- 
-addCoprocessor(String,
 Path, int, Map) - Method in class 
org.apache.hadoop.hbase.client.TableDescriptorBuilder
- 
 addCoprocessor(String)
 - Method in class org.apache.hadoop.hbase.HTableDescriptor
 
 Deprecated.
@@ -281,8 +275,6 @@
 Deprecated.
 Add a table coprocessor to this table.
 
-addCoprocessorWithSpec(String)
 - Method in class org.apache.hadoop.hbase.client.TableDescriptorBuilder
- 
 addCoprocessorWithSpec(String)
 - Method in class org.apache.hadoop.hbase.HTableDescriptor
 
 Deprecated.
@@ -930,6 +922,8 @@
 
 build()
 - Method in class org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder
  
+build()
 - Method in class org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder
+ 
 build()
 - Method in interface org.apache.hadoop.hbase.client.TableBuilder
 
 Create the Table instance.
@@ -2326,6 +2320,14 @@
 
 COPROC
 - Static variable in class org.apache.hadoop.hbase.HBaseInterfaceAudience
  
+CoprocessorDescriptor - Interface in org.apache.hadoop.hbase.client
+
+CoprocessorDescriptor contains the details about how to 
build a coprocessor.
+
+CoprocessorDescriptorBuilder - Class in org.apache.hadoop.hbase.client
+
+Used to build the CoprocessorDescriptor
+
 CoprocessorException - Exception in org.apache.hadoop.hbase.coprocessor
 
 Thrown if a coprocessor encounters any exception.
@@ -2554,18 +2556,37 @@
  
 CP_HTD_ATTR_INCLUSION_KEY
 - Static variable in class org.apache.hadoop.hbase.HConstants
  
-CP_HTD_ATTR_KEY_PATTERN
 - Static variable in class org.apache.hadoop.hbase.HConstants
+CP_HTD_ATTR_KEY_PATTERN
 - Static variable in class org.apache.hadoop.hbase.client.TableDescriptorBuilder
  
+CP_HTD_ATTR_KEY_PATTERN
 - Static variable in class org.apache.hadoop.hbase.HConstants
+
+Deprecated.
+It is used internally. As 
of release 2.0.0, this will be removed in HBase 3.0.0.
+
+
 CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
 - Static variable in class org.apache

hbase git commit: HBASE-20202 [AMv2] Don't move region if its a split parent or offlined

2018-03-16 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 e0bdc87b2 -> 79d47dd57


HBASE-20202 [AMv2] Don't move region if its a split parent or offlined

M 
hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
M 
hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
 Allow passing cause to Constructor.

M hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
 Add prepare step to move procedure.

M 
hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
 Add check that regions to merge are actually online to the Constructor
so we can fail fast if they are offline

M 
hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
 Add prepare step. Check regions and context and skip move if not right.

M 
hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
 Add check parent region is online to constructor.

M 
hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.java
 Add generic check region is online utility function for use by subclasses.

M 
hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMove.java
 Add test that we fail if we try to move an offlined region.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/79d47dd5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/79d47dd5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/79d47dd5

Branch: refs/heads/branch-2
Commit: 79d47dd57a9b3fbcbab203d3da797140427f45a5
Parents: e0bdc87
Author: Michael Stack 
Authored: Thu Mar 15 13:33:27 2018 -0700
Committer: Michael Stack 
Committed: Fri Mar 16 09:34:15 2018 -0700

--
 .../hbase/client/DoNotRetryRegionException.java |  3 ++
 .../hbase/exceptions/MergeRegionException.java  |  4 +++
 .../src/main/protobuf/MasterProcedure.proto |  1 +
 .../org/apache/hadoop/hbase/master/HMaster.java |  2 +-
 .../assignment/MergeTableRegionsProcedure.java  | 24 ++-
 .../master/assignment/MoveRegionProcedure.java  | 11 +++
 .../assignment/SplitTableRegionProcedure.java   |  3 ++
 .../AbstractStateMachineTableProcedure.java | 31 
 .../hbase/namespace/TestNamespaceAuditor.java   |  8 -
 .../TestRegionMergeTransactionOnCluster.java|  8 ++---
 .../hbase/regionserver/TestRegionMove.java  | 14 +
 .../TestSplitTransactionOnCluster.java  | 12 ++--
 12 files changed, 104 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/79d47dd5/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
index 06a0b3d..61ad5cd 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
@@ -37,4 +37,7 @@ public class DoNotRetryRegionException extends 
DoNotRetryIOException {
 super(s);
   }
 
+  public DoNotRetryRegionException(Throwable cause) {
+super(cause);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/79d47dd5/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
index e690084..5399f07 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
@@ -42,4 +42,8 @@ public class MergeRegionException extends 
DoNotRetryRegionException {
   public MergeRegionException(String s) {
 super(s);
   }
+
+  public MergeRegionException(Throwable cause) {
+super(cause);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/79d47dd5/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto 
b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
index 1134bd6..9666c25 100644
--- a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
@@ -338,6 +338,7 @@ mes

hbase git commit: HBASE-20202 [AMv2] Don't move region if its a split parent or offlined

2018-03-16 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 22f4def94 -> 13f3ba3ce


HBASE-20202 [AMv2] Don't move region if it's a split parent or offlined

M hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
M hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
 Allow passing a cause to the constructor.

M hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
 Add a prepare step to the move procedure.

M hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
 Add a check to the constructor that the regions to merge are actually online,
 so we can fail fast if they are offline.

M hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
 Add a prepare step. Check the regions and context, and skip the move if they are not right.

M hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
 Add a check to the constructor that the parent region is online.

M hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.java
 Add a generic region-is-online check utility for use by subclasses.

M hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMove.java
 Add a test that we fail if we try to move an offlined region.
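
For readers skimming the digest, the gist of the change is a fail-fast precheck:
before a move/merge/split procedure is queued, verify that the region is online and
not a split parent, and reject with a do-not-retry exception otherwise. A minimal,
self-contained sketch of that pattern follows; RegionView and UnexpectedStateException
are illustrative stand-ins, not the actual HBase classes.

// Illustrative sketch only -- not the HBase implementation.
final class RegionChecksSketch {

  // Stand-in for the bits of region state the real check consults.
  static final class RegionView {
    final String encodedName;
    final boolean online;       // assumption: region is currently assigned and open
    final boolean splitParent;  // assumption: region has already been split
    RegionView(String encodedName, boolean online, boolean splitParent) {
      this.encodedName = encodedName;
      this.online = online;
      this.splitParent = splitParent;
    }
  }

  static final class UnexpectedStateException extends Exception {
    UnexpectedStateException(String msg) { super(msg); }
  }

  /** Fail fast before queueing a move/merge/split if the region cannot take part. */
  static void checkOnline(RegionView region) throws UnexpectedStateException {
    if (region.splitParent) {
      throw new UnexpectedStateException(region.encodedName + " is a split parent");
    }
    if (!region.online) {
      throw new UnexpectedStateException(region.encodedName + " is not online");
    }
  }

  public static void main(String[] args) throws Exception {
    checkOnline(new RegionView("1588230740", true, false));      // passes
    try {
      checkOnline(new RegionView("d41d8cd98f00", false, true));  // rejected up front
    } catch (UnexpectedStateException e) {
      System.out.println("move rejected: " + e.getMessage());
    }
  }
}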


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/13f3ba3c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/13f3ba3c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/13f3ba3c

Branch: refs/heads/master
Commit: 13f3ba3cee4d8a358c3c5ba653fc12643f000aa7
Parents: 22f4def
Author: Michael Stack 
Authored: Thu Mar 15 13:33:27 2018 -0700
Committer: Michael Stack 
Committed: Fri Mar 16 09:35:33 2018 -0700

--
 .../hbase/client/DoNotRetryRegionException.java |  3 ++
 .../hbase/exceptions/MergeRegionException.java  |  4 +++
 .../src/main/protobuf/MasterProcedure.proto |  1 +
 .../org/apache/hadoop/hbase/master/HMaster.java |  2 +-
 .../assignment/MergeTableRegionsProcedure.java  | 24 ++-
 .../master/assignment/MoveRegionProcedure.java  | 11 +++
 .../assignment/SplitTableRegionProcedure.java   |  3 ++
 .../AbstractStateMachineTableProcedure.java | 31 
 .../hbase/namespace/TestNamespaceAuditor.java   |  8 -
 .../TestRegionMergeTransactionOnCluster.java|  8 ++---
 .../hbase/regionserver/TestRegionMove.java  | 14 +
 .../TestSplitTransactionOnCluster.java  | 12 ++--
 12 files changed, 104 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/13f3ba3c/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
index 06a0b3d..61ad5cd 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
@@ -37,4 +37,7 @@ public class DoNotRetryRegionException extends 
DoNotRetryIOException {
 super(s);
   }
 
+  public DoNotRetryRegionException(Throwable cause) {
+super(cause);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/13f3ba3c/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
index e690084..5399f07 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
@@ -42,4 +42,8 @@ public class MergeRegionException extends 
DoNotRetryRegionException {
   public MergeRegionException(String s) {
 super(s);
   }
+
+  public MergeRegionException(Throwable cause) {
+super(cause);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/13f3ba3c/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto 
b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
index 1134bd6..9666c25 100644
--- a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
@@ -338,6 +338,7 @@ message

hbase git commit: HBASE-20202 [AMv2] Don't move region if it's a split parent or offlined

2018-03-16 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 a2fccc36e -> a18fb33d5


HBASE-20202 [AMv2] Don't move region if it's a split parent or offlined

M hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
M hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
 Allow passing a cause to the constructor.

M hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
 Add a prepare step to the move procedure.

M hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
 Add a check to the constructor that the regions to merge are actually online,
 so we can fail fast if they are offline.

M hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
 Add a prepare step. Check the regions and context, and skip the move if they are not right.

M hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/SplitTableRegionProcedure.java
 Add a check to the constructor that the parent region is online.

M hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.java
 Add a generic region-is-online check utility for use by subclasses.

M hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionMove.java
 Add a test that we fail if we try to move an offlined region.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a18fb33d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a18fb33d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a18fb33d

Branch: refs/heads/branch-2.0
Commit: a18fb33d522a9a20ebdb589c2b9036ee5a15da34
Parents: a2fccc3
Author: Michael Stack 
Authored: Thu Mar 15 13:33:27 2018 -0700
Committer: Michael Stack 
Committed: Fri Mar 16 09:35:13 2018 -0700

--
 .../hbase/client/DoNotRetryRegionException.java |  3 ++
 .../hbase/exceptions/MergeRegionException.java  |  4 +++
 .../src/main/protobuf/MasterProcedure.proto |  1 +
 .../org/apache/hadoop/hbase/master/HMaster.java |  2 +-
 .../assignment/MergeTableRegionsProcedure.java  | 24 ++-
 .../master/assignment/MoveRegionProcedure.java  | 11 +++
 .../assignment/SplitTableRegionProcedure.java   |  3 ++
 .../AbstractStateMachineTableProcedure.java | 31 
 .../hbase/namespace/TestNamespaceAuditor.java   |  8 -
 .../TestRegionMergeTransactionOnCluster.java|  8 ++---
 .../hbase/regionserver/TestRegionMove.java  | 14 +
 .../TestSplitTransactionOnCluster.java  | 12 ++--
 12 files changed, 104 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a18fb33d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
index 06a0b3d..61ad5cd 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/DoNotRetryRegionException.java
@@ -37,4 +37,7 @@ public class DoNotRetryRegionException extends 
DoNotRetryIOException {
 super(s);
   }
 
+  public DoNotRetryRegionException(Throwable cause) {
+super(cause);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a18fb33d/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
index e690084..5399f07 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/MergeRegionException.java
@@ -42,4 +42,8 @@ public class MergeRegionException extends 
DoNotRetryRegionException {
   public MergeRegionException(String s) {
 super(s);
   }
+
+  public MergeRegionException(Throwable cause) {
+super(cause);
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a18fb33d/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto 
b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
index 8f03a9f..7415cbf 100644
--- a/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/MasterProcedure.proto
@@ -337,6 +337,7 @@

hbase git commit: [HBASE-20141] Fix TooManyFiles exception when RefreshingChannels

2018-03-16 Thread zyork
Repository: hbase
Updated Branches:
  refs/heads/master 13f3ba3ce -> aaa90d806


[HBASE-20141] Fix TooManyFiles exception when RefreshingChannels

HBASE-19435 implements a fix for reopening file channels when they are
unexpectedly closed, to avoid disabling the BucketCache. However, it was
missed that the channels might not actually be completely closed: the write
or read channel might still be open (see
https://docs.oracle.com/javase/7/docs/api/java/nio/channels/ClosedChannelException.html).
This commit closes any open channels before creating a new channel.
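
As a standalone illustration of the close-before-reopen pattern the patch applies in
FileIOEngine.refreshFileConnection (the class and field names below are illustrative,
not the real FileIOEngine):

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;

public class ChannelRefreshSketch {
  private final String[] filePaths;
  private final RandomAccessFile[] rafs;
  private final FileChannel[] fileChannels;

  public ChannelRefreshSketch(String... paths) {
    this.filePaths = paths;
    this.rafs = new RandomAccessFile[paths.length];
    this.fileChannels = new FileChannel[paths.length];
  }

  void refreshFileConnection(int i) throws IOException {
    FileChannel old = fileChannels[i];
    if (old != null) {
      old.close();  // close whatever is left open (read side, write side, or both)
    }
    rafs[i] = new RandomAccessFile(filePaths[i], "rw");
    fileChannels[i] = rafs[i].getChannel();
  }

  public static void main(String[] args) throws IOException {
    File f = File.createTempFile("ioengine", ".data");
    f.deleteOnExit();
    ChannelRefreshSketch engine = new ChannelRefreshSketch(f.getAbsolutePath());
    engine.refreshFileConnection(0);             // first open
    FileChannel before = engine.fileChannels[0];
    engine.refreshFileConnection(0);             // refresh: old channel is closed first
    System.out.println("old channel still open? " + before.isOpen());  // false
  }
}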


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/aaa90d80
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/aaa90d80
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/aaa90d80

Branch: refs/heads/master
Commit: aaa90d80690114ca3300084cdba977024b5a7fd5
Parents: 13f3ba3
Author: Zach York 
Authored: Wed Feb 28 10:40:38 2018 -0800
Committer: Zach York 
Committed: Fri Mar 16 10:51:39 2018 -0700

--
 .../hadoop/hbase/io/hfile/bucket/FileIOEngine.java | 13 +++--
 .../hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java | 11 +++
 2 files changed, 22 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/aaa90d80/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
index cf963f0..648d4bc 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.nio.ByteBuffer;
@@ -274,7 +273,17 @@ public class FileIOEngine implements IOEngine {
 return fileNum;
   }
 
-  private void refreshFileConnection(int accessFileNum) throws 
FileNotFoundException {
+  @VisibleForTesting
+  FileChannel[] getFileChannels() {
+return fileChannels;
+  }
+
+  @VisibleForTesting
+  void refreshFileConnection(int accessFileNum) throws IOException {
+FileChannel fileChannel = fileChannels[accessFileNum];
+if (fileChannel != null) {
+  fileChannel.close();
+}
 rafs[accessFileNum] = new RandomAccessFile(filePaths[accessFileNum], "rw");
 fileChannels[accessFileNum] = rafs[accessFileNum].getChannel();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/aaa90d80/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
index 5086265..6480986 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
@@ -18,10 +18,13 @@
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
 import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
 
 import java.io.File;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -138,4 +141,12 @@ public class TestFileIOEngine {
 ByteBuff data2 = deserializer.getDeserializedByteBuff();
 assertArrayEquals(data1, data2.array());
   }
+
+  @Test
+  public void testRefreshFileConnectionClosesConnections() throws IOException {
+FileChannel fileChannel = fileIOEngine.getFileChannels()[0];
+assertNotNull(fileChannel);
+fileIOEngine.refreshFileConnection(0);
+assertFalse(fileChannel.isOpen());
+  }
 }



hbase git commit: [HBASE-20141] Fix TooManyFiles exception when RefreshingChannels

2018-03-16 Thread zyork
Repository: hbase
Updated Branches:
  refs/heads/branch-2 79d47dd57 -> 6bf967adf


[HBASE-20141] Fix TooManyFiles exception when RefreshingChannels

HBASE-19435 implements a fix for reopening file channels when they are
unexpectedly closed, to avoid disabling the BucketCache. However, it was
missed that the channels might not actually be completely closed: the write
or read channel might still be open (see
https://docs.oracle.com/javase/7/docs/api/java/nio/channels/ClosedChannelException.html).
This commit closes any open channels before creating a new channel.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6bf967ad
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6bf967ad
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6bf967ad

Branch: refs/heads/branch-2
Commit: 6bf967adfb53001365bf4f9d5a72a1e095ee2900
Parents: 79d47dd
Author: Zach York 
Authored: Wed Feb 28 10:40:38 2018 -0800
Committer: Zach York 
Committed: Fri Mar 16 10:54:43 2018 -0700

--
 .../hadoop/hbase/io/hfile/bucket/FileIOEngine.java | 13 +++--
 .../hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java | 11 +++
 2 files changed, 22 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6bf967ad/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
index cf963f0..648d4bc 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.nio.ByteBuffer;
@@ -274,7 +273,17 @@ public class FileIOEngine implements IOEngine {
 return fileNum;
   }
 
-  private void refreshFileConnection(int accessFileNum) throws 
FileNotFoundException {
+  @VisibleForTesting
+  FileChannel[] getFileChannels() {
+return fileChannels;
+  }
+
+  @VisibleForTesting
+  void refreshFileConnection(int accessFileNum) throws IOException {
+FileChannel fileChannel = fileChannels[accessFileNum];
+if (fileChannel != null) {
+  fileChannel.close();
+}
 rafs[accessFileNum] = new RandomAccessFile(filePaths[accessFileNum], "rw");
 fileChannels[accessFileNum] = rafs[accessFileNum].getChannel();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/6bf967ad/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
index 5086265..6480986 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
@@ -18,10 +18,13 @@
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
 import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
 
 import java.io.File;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -138,4 +141,12 @@ public class TestFileIOEngine {
 ByteBuff data2 = deserializer.getDeserializedByteBuff();
 assertArrayEquals(data1, data2.array());
   }
+
+  @Test
+  public void testRefreshFileConnectionClosesConnections() throws IOException {
+FileChannel fileChannel = fileIOEngine.getFileChannels()[0];
+assertNotNull(fileChannel);
+fileIOEngine.refreshFileConnection(0);
+assertFalse(fileChannel.isOpen());
+  }
 }



hbase git commit: [HBASE-20141] Fix TooManyFiles exception when RefreshingChannels

2018-03-16 Thread zyork
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 a18fb33d5 -> 10a7b5ea1


[HBASE-20141] Fix TooManyFiles exception when RefreshingChannels

HBASE-19435 implements a fix for reopening file channels when they are
unexpectedly closed, to avoid disabling the BucketCache. However, it was
missed that the channels might not actually be completely closed: the write
or read channel might still be open (see
https://docs.oracle.com/javase/7/docs/api/java/nio/channels/ClosedChannelException.html).
This commit closes any open channels before creating a new channel.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/10a7b5ea
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/10a7b5ea
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/10a7b5ea

Branch: refs/heads/branch-2.0
Commit: 10a7b5ea1591e93fff39a0c9fc35ee2c8805d49f
Parents: a18fb33
Author: Zach York 
Authored: Wed Feb 28 10:40:38 2018 -0800
Committer: Zach York 
Committed: Fri Mar 16 10:56:06 2018 -0700

--
 .../hadoop/hbase/io/hfile/bucket/FileIOEngine.java | 13 +++--
 .../hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java | 11 +++
 2 files changed, 22 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/10a7b5ea/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
index cf963f0..648d4bc 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.nio.ByteBuffer;
@@ -274,7 +273,17 @@ public class FileIOEngine implements IOEngine {
 return fileNum;
   }
 
-  private void refreshFileConnection(int accessFileNum) throws 
FileNotFoundException {
+  @VisibleForTesting
+  FileChannel[] getFileChannels() {
+return fileChannels;
+  }
+
+  @VisibleForTesting
+  void refreshFileConnection(int accessFileNum) throws IOException {
+FileChannel fileChannel = fileChannels[accessFileNum];
+if (fileChannel != null) {
+  fileChannel.close();
+}
 rafs[accessFileNum] = new RandomAccessFile(filePaths[accessFileNum], "rw");
 fileChannels[accessFileNum] = rafs[accessFileNum].getChannel();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/10a7b5ea/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
index 5086265..6480986 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
@@ -18,10 +18,13 @@
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
 import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
 
 import java.io.File;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -138,4 +141,12 @@ public class TestFileIOEngine {
 ByteBuff data2 = deserializer.getDeserializedByteBuff();
 assertArrayEquals(data1, data2.array());
   }
+
+  @Test
+  public void testRefreshFileConnectionClosesConnections() throws IOException {
+FileChannel fileChannel = fileIOEngine.getFileChannels()[0];
+assertNotNull(fileChannel);
+fileIOEngine.refreshFileConnection(0);
+assertFalse(fileChannel.isOpen());
+  }
 }



hbase git commit: [HBASE-20141] Fix TooManyFiles exception when RefreshingChannels

2018-03-16 Thread zyork
Repository: hbase
Updated Branches:
  refs/heads/branch-1 a0b2141a9 -> 009295a3a


[HBASE-20141] Fix TooManyFiles exception when RefreshingChannels

HBASE-19435 implements a fix for reopening file channels when they are
unexpectedly closed, to avoid disabling the BucketCache. However, it was
missed that the channels might not actually be completely closed: the write
or read channel might still be open (see
https://docs.oracle.com/javase/7/docs/api/java/nio/channels/ClosedChannelException.html).
This commit closes any open channels before creating a new channel.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/009295a3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/009295a3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/009295a3

Branch: refs/heads/branch-1
Commit: 009295a3a69375822adafed1e993e80d45c680ce
Parents: a0b2141
Author: Zach York 
Authored: Wed Feb 28 10:40:38 2018 -0800
Committer: Zach York 
Committed: Fri Mar 16 10:58:22 2018 -0700

--
 .../hadoop/hbase/io/hfile/bucket/FileIOEngine.java | 13 +++--
 .../hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java | 11 +++
 2 files changed, 22 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/009295a3/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
index cb454d4..7b773bd 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.nio.ByteBuffer;
@@ -234,7 +233,17 @@ public class FileIOEngine implements IOEngine {
 return fileNum;
   }
 
-  private void refreshFileConnection(int accessFileNum) throws 
FileNotFoundException {
+  @VisibleForTesting
+  FileChannel[] getFileChannels() {
+return fileChannels;
+  }
+
+  @VisibleForTesting
+  void refreshFileConnection(int accessFileNum) throws IOException {
+FileChannel fileChannel = fileChannels[accessFileNum];
+if (fileChannel != null) {
+  fileChannel.close();
+}
 rafs[accessFileNum] = new RandomAccessFile(filePaths[accessFileNum], "rw");
 fileChannels[accessFileNum] = rafs[accessFileNum].getChannel();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/009295a3/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
index adf7fd0..8c2bc6e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
@@ -19,10 +19,13 @@
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
 import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
 
 import java.io.File;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -129,4 +132,12 @@ public class TestFileIOEngine {
 fileIOEngine.read(ByteBuffer.wrap(data2), offset);
 assertArrayEquals(data1, data2);
   }
+
+  @Test
+  public void testRefreshFileConnectionClosesConnections() throws IOException {
+FileChannel fileChannel = fileIOEngine.getFileChannels()[0];
+assertNotNull(fileChannel);
+fileIOEngine.refreshFileConnection(0);
+assertFalse(fileChannel.isOpen());
+  }
 }



hbase git commit: [HBASE-20141] Fix TooManyFiles exception when RefreshingChannels

2018-03-16 Thread zyork
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 ac0fb1ce8 -> 51a35aafc


[HBASE-20141] Fix TooManyFiles exception when RefreshingChannels

HBASE-19435 implements a fix for reopening file channels when they are
unexpectedly closed, to avoid disabling the BucketCache. However, it was
missed that the channels might not actually be completely closed: the write
or read channel might still be open (see
https://docs.oracle.com/javase/7/docs/api/java/nio/channels/ClosedChannelException.html).
This commit closes any open channels before creating a new channel.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/51a35aaf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/51a35aaf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/51a35aaf

Branch: refs/heads/branch-1.4
Commit: 51a35aafcdac7954fd6851958851c13db417
Parents: ac0fb1c
Author: Zach York 
Authored: Wed Feb 28 10:40:38 2018 -0800
Committer: Zach York 
Committed: Fri Mar 16 11:02:58 2018 -0700

--
 .../hadoop/hbase/io/hfile/bucket/FileIOEngine.java | 13 +++--
 .../hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java | 11 +++
 2 files changed, 22 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/51a35aaf/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
index cb454d4..7b773bd 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.nio.ByteBuffer;
@@ -234,7 +233,17 @@ public class FileIOEngine implements IOEngine {
 return fileNum;
   }
 
-  private void refreshFileConnection(int accessFileNum) throws 
FileNotFoundException {
+  @VisibleForTesting
+  FileChannel[] getFileChannels() {
+return fileChannels;
+  }
+
+  @VisibleForTesting
+  void refreshFileConnection(int accessFileNum) throws IOException {
+FileChannel fileChannel = fileChannels[accessFileNum];
+if (fileChannel != null) {
+  fileChannel.close();
+}
 rafs[accessFileNum] = new RandomAccessFile(filePaths[accessFileNum], "rw");
 fileChannels[accessFileNum] = rafs[accessFileNum].getChannel();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/51a35aaf/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
index adf7fd0..8c2bc6e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java
@@ -19,10 +19,13 @@
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
 import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
 
 import java.io.File;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -129,4 +132,12 @@ public class TestFileIOEngine {
 fileIOEngine.read(ByteBuffer.wrap(data2), offset);
 assertArrayEquals(data1, data2);
   }
+
+  @Test
+  public void testRefreshFileConnectionClosesConnections() throws IOException {
+FileChannel fileChannel = fileIOEngine.getFileChannels()[0];
+assertNotNull(fileChannel);
+fileIOEngine.refreshFileConnection(0);
+assertFalse(fileChannel.isOpen());
+  }
 }



hbase git commit: HBASE-20213 [LOGGING] Aligning formatting and logging less (compactions, in-memory compactions)

2018-03-16 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 6bf967adf -> 3f1c86786


HBASE-20213 [LOGGING] Aligning formatting and logging less (compactions, in-memory compactions)

Log less. Log using the same format as used elsewhere in the log.

Align logs in HFileArchiver with how we format elsewhere. Removed
redundant 'region' qualifiers and tightened up the emissions so the
long lines are easier to read.

M hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
 Add a label for each of the chunk creators we make (I was confused by
two chunk creator stats emissions in the log file -- didn't know that one
was for data and the other for index).

M hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
 Formatting. Log less.

M hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java
 Make the emissions in here trace-level. When there are more than a few regions,
the log fills up with this stuff.
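
The direction of the change, sketched in standalone form (requires slf4j-api on the
classpath; the class name and messages are illustrative, not lines from the patch):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingStyleSketch {
  private static final Logger LOG = LoggerFactory.getLogger(LoggingStyleSketch.class);

  void report(String server, String tableName, Throwable t) {
    // Parameterized form: the message is only assembled if this level is enabled,
    // and passing the throwable as the last argument keeps its stack trace in the log.
    LOG.error("Caught throwable. Retrying. Server={}, tableName={}", server, tableName, t);

    // Chatty per-region detail goes to TRACE so it disappears at the default INFO level.
    if (LOG.isTraceEnabled()) {
      LOG.trace("Flattening memstore segments on {}", server);
    }
  }

  public static void main(String[] args) {
    new LoggingStyleSketch().report("rs1.example.org,16020", "t1", new RuntimeException("boom"));
  }
}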


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3f1c8678
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3f1c8678
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3f1c8678

Branch: refs/heads/branch-2
Commit: 3f1c86786c5fd8b93f619ba40ad6d5499c3a8252
Parents: 6bf967a
Author: Michael Stack 
Authored: Thu Mar 15 20:18:46 2018 -0700
Committer: Michael Stack 
Committed: Fri Mar 16 13:07:34 2018 -0700

--
 .../hbase/client/AsyncRequestFutureImpl.java| 19 +++---
 .../hadoop/hbase/ipc/NettyRpcConnection.java|  4 +-
 .../hbase/procedure2/ProcedureExecutor.java |  3 +-
 .../replication/ZKReplicationQueueStorage.java  | 70 +++-
 .../hadoop/hbase/backup/HFileArchiver.java  | 61 -
 .../org/apache/hadoop/hbase/master/HMaster.java |  4 +-
 .../master/balancer/StochasticLoadBalancer.java |  2 +-
 .../procedure/MasterProcedureScheduler.java |  2 +-
 .../hadoop/hbase/regionserver/ChunkCreator.java | 52 +++
 .../hadoop/hbase/regionserver/CompactSplit.java | 16 ++---
 .../hbase/regionserver/CompactingMemStore.java  |  5 +-
 .../hadoop/hbase/regionserver/HRegion.java  |  4 +-
 .../MemStoreCompactionStrategy.java |  8 +--
 .../hbase/regionserver/MemStoreCompactor.java   |  6 +-
 .../hadoop/hbase/regionserver/StoreScanner.java |  6 +-
 .../regionserver/compactions/Compactor.java | 17 +++--
 16 files changed, 124 insertions(+), 155 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3f1c8678/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
index a8b8ebf..c3cb866 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
@@ -97,7 +97,7 @@ class AsyncRequestFutureImpl implements 
AsyncRequestFuture {
 try {
   done = waitUntilDone(startTime * 1000L + 
asyncProcess.primaryCallTimeoutMicroseconds);
 } catch (InterruptedException ex) {
-  LOG.error("Replica thread was interrupted - no replica calls: " + 
ex.getMessage());
+  LOG.error("Replica thread interrupted - no replica calls {}", 
ex.getMessage());
   return;
 }
   }
@@ -141,7 +141,7 @@ class AsyncRequestFutureImpl implements 
AsyncRequestFuture {
   if (loc == null) return;
   HRegionLocation[] locs = loc.getRegionLocations();
   if (locs.length == 1) {
-LOG.warn("No replicas found for " + action.getAction());
+LOG.warn("No replicas found for {}", action.getAction());
 return;
   }
   synchronized (replicaResultLock) {
@@ -222,8 +222,8 @@ class AsyncRequestFutureImpl implements 
AsyncRequestFuture {
   return;
 } catch (Throwable t) {
   // This should not happen. Let's log & retry anyway.
-  LOG.error("#" + asyncProcess.id + ", Caught throwable while calling. 
This is unexpected." +
-  " Retrying. Server is " + server + ", tableName=" + tableName, 
t);
+  LOG.error("id=" + asyncProcess.id + ", caught throwable. 
Unexpected." +
+  " Retrying. Server=" + server + ", tableName=" + tableName, t);
   receiveGlobalFailure(multiAction, server, numAttempt, t);
   return;
 }
@@ -239,8 +239,7 @@ class AsyncRequestFutureImpl implements 
AsyncRequestFuture {
 }
   } catch (Throwable t) {
 // Something really bad happened. We are on the send thread that will 
now die.
-LOG.er

hbase git commit: HBASE-20213 [LOGGING] Aligning formatting and logging less (compactions, in-memory compactions)

2018-03-16 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 10a7b5ea1 -> 7e0914759


HBASE-20213 [LOGGING] Aligning formatting and logging less (compactions, in-memory compactions)

Log less. Log using the same format as used elsewhere in the log.

Align logs in HFileArchiver with how we format elsewhere. Removed
redundant 'region' qualifiers and tightened up the emissions so the
long lines are easier to read.

M hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
 Add a label for each of the chunk creators we make (I was confused by
two chunk creator stats emissions in the log file -- didn't know that one
was for data and the other for index).

M hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
 Formatting. Log less.

M hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java
 Make the emissions in here trace-level. When there are more than a few regions,
the log fills up with this stuff.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7e091475
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7e091475
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7e091475

Branch: refs/heads/branch-2.0
Commit: 7e09147596a42afbbd57981cced913414aeebc46
Parents: 10a7b5e
Author: Michael Stack 
Authored: Thu Mar 15 20:18:46 2018 -0700
Committer: Michael Stack 
Committed: Fri Mar 16 13:15:42 2018 -0700

--
 .../hbase/client/AsyncRequestFutureImpl.java| 19 +++---
 .../hadoop/hbase/ipc/NettyRpcConnection.java|  4 +-
 .../hbase/procedure2/ProcedureExecutor.java |  3 +-
 .../hadoop/hbase/backup/HFileArchiver.java  | 61 +---
 .../org/apache/hadoop/hbase/master/HMaster.java |  4 +-
 .../master/balancer/StochasticLoadBalancer.java |  2 +-
 .../procedure/MasterProcedureScheduler.java |  2 +-
 .../hadoop/hbase/regionserver/ChunkCreator.java | 52 -
 .../hadoop/hbase/regionserver/CompactSplit.java | 16 ++---
 .../hbase/regionserver/CompactingMemStore.java  |  5 +-
 .../hadoop/hbase/regionserver/HRegion.java  |  4 +-
 .../MemStoreCompactionStrategy.java |  8 +--
 .../hbase/regionserver/MemStoreCompactor.java   |  6 +-
 .../hadoop/hbase/regionserver/StoreScanner.java |  6 +-
 .../regionserver/compactions/Compactor.java | 17 +++---
 15 files changed, 99 insertions(+), 110 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7e091475/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
index a8b8ebf..c3cb866 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
@@ -97,7 +97,7 @@ class AsyncRequestFutureImpl implements 
AsyncRequestFuture {
 try {
   done = waitUntilDone(startTime * 1000L + 
asyncProcess.primaryCallTimeoutMicroseconds);
 } catch (InterruptedException ex) {
-  LOG.error("Replica thread was interrupted - no replica calls: " + 
ex.getMessage());
+  LOG.error("Replica thread interrupted - no replica calls {}", 
ex.getMessage());
   return;
 }
   }
@@ -141,7 +141,7 @@ class AsyncRequestFutureImpl implements 
AsyncRequestFuture {
   if (loc == null) return;
   HRegionLocation[] locs = loc.getRegionLocations();
   if (locs.length == 1) {
-LOG.warn("No replicas found for " + action.getAction());
+LOG.warn("No replicas found for {}", action.getAction());
 return;
   }
   synchronized (replicaResultLock) {
@@ -222,8 +222,8 @@ class AsyncRequestFutureImpl implements 
AsyncRequestFuture {
   return;
 } catch (Throwable t) {
   // This should not happen. Let's log & retry anyway.
-  LOG.error("#" + asyncProcess.id + ", Caught throwable while calling. 
This is unexpected." +
-  " Retrying. Server is " + server + ", tableName=" + tableName, 
t);
+  LOG.error("id=" + asyncProcess.id + ", caught throwable. 
Unexpected." +
+  " Retrying. Server=" + server + ", tableName=" + tableName, t);
   receiveGlobalFailure(multiAction, server, numAttempt, t);
   return;
 }
@@ -239,8 +239,7 @@ class AsyncRequestFutureImpl implements 
AsyncRequestFuture {
 }
   } catch (Throwable t) {
 // Something really bad happened. We are on the send thread that will 
now die.
-LOG.error("Internal AsyncProcess #" + asyncProcess.id + " error for "
- 

hbase git commit: HBASE-20213 [LOGGING] Aligning formatting and logging less (compactions, in-memory compactions)

2018-03-16 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master aaa90d806 -> bedf849d8


HBASE-20213 [LOGGING] Aligning formatting and logging less (compactions, in-memory compactions)

Log less. Log using the same format as used elsewhere in the log.

Align logs in HFileArchiver with how we format elsewhere. Removed
redundant 'region' qualifiers and tightened up the emissions so the
long lines are easier to read.

M hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
 Add a label for each of the chunk creators we make (I was confused by
two chunk creator stats emissions in the log file -- didn't know that one
was for data and the other for index).

M hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
 Formatting. Log less.

M hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreCompactionStrategy.java
 Make the emissions in here trace-level. When there are more than a few regions,
the log fills up with this stuff.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bedf849d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bedf849d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bedf849d

Branch: refs/heads/master
Commit: bedf849d83ef2a6b092a57f492ab5b08a7d71b0c
Parents: aaa90d8
Author: Michael Stack 
Authored: Thu Mar 15 20:18:46 2018 -0700
Committer: Michael Stack 
Committed: Fri Mar 16 13:16:49 2018 -0700

--
 .../hbase/client/AsyncRequestFutureImpl.java| 19 +++---
 .../hadoop/hbase/ipc/NettyRpcConnection.java|  4 +-
 .../hbase/procedure2/ProcedureExecutor.java |  3 +-
 .../replication/ZKReplicationQueueStorage.java  | 70 +++-
 .../hadoop/hbase/backup/HFileArchiver.java  | 61 -
 .../org/apache/hadoop/hbase/master/HMaster.java |  4 +-
 .../master/balancer/StochasticLoadBalancer.java |  2 +-
 .../procedure/MasterProcedureScheduler.java |  2 +-
 .../hadoop/hbase/regionserver/ChunkCreator.java | 46 +++--
 .../hadoop/hbase/regionserver/CompactSplit.java | 16 ++---
 .../hbase/regionserver/CompactingMemStore.java  |  5 +-
 .../hadoop/hbase/regionserver/HRegion.java  |  4 +-
 .../MemStoreCompactionStrategy.java |  8 +--
 .../hbase/regionserver/MemStoreCompactor.java   |  6 +-
 .../hadoop/hbase/regionserver/StoreScanner.java |  6 +-
 .../regionserver/compactions/Compactor.java | 17 +++--
 16 files changed, 122 insertions(+), 151 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bedf849d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
index a8b8ebf..c3cb866 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
@@ -97,7 +97,7 @@ class AsyncRequestFutureImpl implements 
AsyncRequestFuture {
 try {
   done = waitUntilDone(startTime * 1000L + 
asyncProcess.primaryCallTimeoutMicroseconds);
 } catch (InterruptedException ex) {
-  LOG.error("Replica thread was interrupted - no replica calls: " + 
ex.getMessage());
+  LOG.error("Replica thread interrupted - no replica calls {}", 
ex.getMessage());
   return;
 }
   }
@@ -141,7 +141,7 @@ class AsyncRequestFutureImpl implements 
AsyncRequestFuture {
   if (loc == null) return;
   HRegionLocation[] locs = loc.getRegionLocations();
   if (locs.length == 1) {
-LOG.warn("No replicas found for " + action.getAction());
+LOG.warn("No replicas found for {}", action.getAction());
 return;
   }
   synchronized (replicaResultLock) {
@@ -222,8 +222,8 @@ class AsyncRequestFutureImpl implements 
AsyncRequestFuture {
   return;
 } catch (Throwable t) {
   // This should not happen. Let's log & retry anyway.
-  LOG.error("#" + asyncProcess.id + ", Caught throwable while calling. 
This is unexpected." +
-  " Retrying. Server is " + server + ", tableName=" + tableName, 
t);
+  LOG.error("id=" + asyncProcess.id + ", caught throwable. 
Unexpected." +
+  " Retrying. Server=" + server + ", tableName=" + tableName, t);
   receiveGlobalFailure(multiAction, server, numAttempt, t);
   return;
 }
@@ -239,8 +239,7 @@ class AsyncRequestFutureImpl implements 
AsyncRequestFuture {
 }
   } catch (Throwable t) {
 // Something really bad happened. We are on the send thread that will 
now die.
-LOG.error("I

[2/3] hbase-thirdparty git commit: Merge tag '2.0.0RC0'

2018-03-16 Thread elserj
Merge tag '2.0.0RC0'

hbase-thirdparty 2.0.0 release candidate 0


Project: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/commit/f906c721
Tree: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/tree/f906c721
Diff: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/diff/f906c721

Branch: refs/heads/master
Commit: f906c7216dd35136e7da04ca91f7da7af72e96ed
Parents: 3d380f1 2b55dc7
Author: Josh Elser 
Authored: Fri Mar 16 16:33:12 2018 -0400
Committer: Josh Elser 
Committed: Fri Mar 16 16:33:12 2018 -0400

--
 hbase-shaded-miscellaneous/pom.xml | 2 +-
 hbase-shaded-netty/pom.xml | 2 +-
 hbase-shaded-protobuf/pom.xml  | 2 +-
 pom.xml| 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)
--




[3/3] hbase-thirdparty git commit: Update to 2.0.1-SNAPSHOT

2018-03-16 Thread elserj
Update to 2.0.1-SNAPSHOT


Project: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/commit/94f75fbb
Tree: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/tree/94f75fbb
Diff: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/diff/94f75fbb

Branch: refs/heads/master
Commit: 94f75fbb65ffcbf895dbd139328a9cc9361bbd3d
Parents: f906c72
Author: Josh Elser 
Authored: Fri Mar 16 19:02:47 2018 -0400
Committer: Josh Elser 
Committed: Fri Mar 16 19:02:47 2018 -0400

--
 hbase-shaded-miscellaneous/pom.xml | 2 +-
 hbase-shaded-netty/pom.xml | 2 +-
 hbase-shaded-protobuf/pom.xml  | 2 +-
 pom.xml| 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/94f75fbb/hbase-shaded-miscellaneous/pom.xml
--
diff --git a/hbase-shaded-miscellaneous/pom.xml 
b/hbase-shaded-miscellaneous/pom.xml
index 960ca39..0a985c2 100644
--- a/hbase-shaded-miscellaneous/pom.xml
+++ b/hbase-shaded-miscellaneous/pom.xml
@@ -32,7 +32,7 @@
   
 org.apache.hbase.thirdparty
 hbase-thirdparty
-2.0.0
+2.0.1-SNAPSHOT
 ..
   
   hbase-shaded-miscellaneous

http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/94f75fbb/hbase-shaded-netty/pom.xml
--
diff --git a/hbase-shaded-netty/pom.xml b/hbase-shaded-netty/pom.xml
index 6ebff35..1d8265d 100644
--- a/hbase-shaded-netty/pom.xml
+++ b/hbase-shaded-netty/pom.xml
@@ -32,7 +32,7 @@
   
 org.apache.hbase.thirdparty
 hbase-thirdparty
-2.0.0
+2.0.1-SNAPSHOT
 ..
   
   hbase-shaded-netty

http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/94f75fbb/hbase-shaded-protobuf/pom.xml
--
diff --git a/hbase-shaded-protobuf/pom.xml b/hbase-shaded-protobuf/pom.xml
index d672759..b60890b 100644
--- a/hbase-shaded-protobuf/pom.xml
+++ b/hbase-shaded-protobuf/pom.xml
@@ -23,7 +23,7 @@
   
 org.apache.hbase.thirdparty
 hbase-thirdparty
-2.0.0
+2.0.1-SNAPSHOT
 ..
   
   hbase-shaded-protobuf

http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/94f75fbb/pom.xml
--
diff --git a/pom.xml b/pom.xml
index f1c6b60..f0d4693 100644
--- a/pom.xml
+++ b/pom.xml
@@ -38,7 +38,7 @@
   
   org.apache.hbase.thirdparty
   hbase-thirdparty
-  2.0.0
+  2.0.1-SNAPSHOT
   Apache HBase Third-Party Libs
   pom
   



[1/3] hbase-thirdparty git commit: update project version for release

2018-03-16 Thread elserj
Repository: hbase-thirdparty
Updated Branches:
  refs/heads/master 3d380f128 -> 94f75fbb6


update project version for release


Project: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/commit/2b55dc79
Tree: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/tree/2b55dc79
Diff: http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/diff/2b55dc79

Branch: refs/heads/master
Commit: 2b55dc792196a12d9a365a758a518f26c459391e
Parents: 3d380f1
Author: Mike Drob 
Authored: Mon Dec 18 15:29:02 2017 -0600
Committer: Mike Drob 
Committed: Tue Dec 19 11:23:39 2017 -0600

--
 hbase-shaded-miscellaneous/pom.xml | 2 +-
 hbase-shaded-netty/pom.xml | 2 +-
 hbase-shaded-protobuf/pom.xml  | 2 +-
 pom.xml| 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/2b55dc79/hbase-shaded-miscellaneous/pom.xml
--
diff --git a/hbase-shaded-miscellaneous/pom.xml 
b/hbase-shaded-miscellaneous/pom.xml
index aec16f2..960ca39 100644
--- a/hbase-shaded-miscellaneous/pom.xml
+++ b/hbase-shaded-miscellaneous/pom.xml
@@ -32,7 +32,7 @@
   
 org.apache.hbase.thirdparty
 hbase-thirdparty
-1.0.1
+2.0.0
 ..
   
   hbase-shaded-miscellaneous

http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/2b55dc79/hbase-shaded-netty/pom.xml
--
diff --git a/hbase-shaded-netty/pom.xml b/hbase-shaded-netty/pom.xml
index 59ee5f9..6ebff35 100644
--- a/hbase-shaded-netty/pom.xml
+++ b/hbase-shaded-netty/pom.xml
@@ -32,7 +32,7 @@
   
 org.apache.hbase.thirdparty
 hbase-thirdparty
-1.0.1
+2.0.0
 ..
   
   hbase-shaded-netty

http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/2b55dc79/hbase-shaded-protobuf/pom.xml
--
diff --git a/hbase-shaded-protobuf/pom.xml b/hbase-shaded-protobuf/pom.xml
index d762786..d672759 100644
--- a/hbase-shaded-protobuf/pom.xml
+++ b/hbase-shaded-protobuf/pom.xml
@@ -23,7 +23,7 @@
   
 org.apache.hbase.thirdparty
 hbase-thirdparty
-1.0.1
+2.0.0
 ..
   
   hbase-shaded-protobuf

http://git-wip-us.apache.org/repos/asf/hbase-thirdparty/blob/2b55dc79/pom.xml
--
diff --git a/pom.xml b/pom.xml
index fc84687..f1c6b60 100644
--- a/pom.xml
+++ b/pom.xml
@@ -38,7 +38,7 @@
   
   org.apache.hbase.thirdparty
   hbase-thirdparty
-  1.0.1
+  2.0.0
   Apache HBase Third-Party Libs
   pom
   



hbase git commit: HBASE-20214 Review of RegionLocationFinder Class

2018-03-16 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master bedf849d8 -> 104f58701


HBASE-20214 Review of RegionLocationFinder Class


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/104f5870
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/104f5870
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/104f5870

Branch: refs/heads/master
Commit: 104f58701efd341dca8642b4566653cb93b48c12
Parents: bedf849
Author: BELUGA BEHR 
Authored: Fri Mar 16 16:09:40 2018 -0700
Committer: tedyu 
Committed: Fri Mar 16 16:09:40 2018 -0700

--
 .../master/balancer/RegionLocationFinder.java   | 87 +++-
 1 file changed, 32 insertions(+), 55 deletions(-)
--
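
The main functional change, visible in the getTopBlockLocations hunk below, replaces a
manual copy loop with indexOf/subList: keep the hosts ranked ahead of the current host,
or all of them if the current host is absent. A standalone illustration of that
behavior (names and data are illustrative):

import java.util.Arrays;
import java.util.List;

public class TopHostsSketch {
  static List<String> hostsAhead(List<String> topHosts, String currentHost) {
    int toIndex = topHosts.indexOf(currentHost);
    // subList returns a view over the prefix; no copying loop needed.
    return (toIndex < 0) ? topHosts : topHosts.subList(0, toIndex);
  }

  public static void main(String[] args) {
    List<String> topHosts = Arrays.asList("h1", "h2", "h3", "h4");
    System.out.println(hostsAhead(topHosts, "h3"));   // [h1, h2]
    System.out.println(hostsAhead(topHosts, "h9"));   // [h1, h2, h3, h4]
  }
}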


http://git-wip-us.apache.org/repos/asf/hbase/blob/104f5870/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
index 07e9600..8b764d9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/RegionLocationFinder.java
@@ -21,12 +21,18 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
+
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.commons.collections4.MultiValuedMap;
+import org.apache.commons.collections4.multimap.ArrayListValuedHashMap;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ClusterMetrics;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
@@ -41,10 +47,10 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import org.apache.hbase.thirdparty.com.google.common.cache.CacheBuilder;
 import org.apache.hbase.thirdparty.com.google.common.cache.CacheLoader;
 import org.apache.hbase.thirdparty.com.google.common.cache.LoadingCache;
-import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 import org.apache.hbase.thirdparty.com.google.common.util.concurrent.Futures;
 import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListenableFuture;
 import 
org.apache.hbase.thirdparty.com.google.common.util.concurrent.ListeningExecutorService;
@@ -132,7 +138,6 @@ class RegionLocationFinder {
   // Only count the refresh if it includes user tables ( eg more than meta 
and namespace ).
   lastFullRefresh = scheduleFullRefresh()?currentTime:lastFullRefresh;
 }
-
   }
 
   /**
@@ -171,14 +176,10 @@ class RegionLocationFinder {
*/
   protected List<ServerName> getTopBlockLocations(RegionInfo region, String currentHost) {
     HDFSBlocksDistribution blocksDistribution = getBlockDistribution(region);
-    List<String> topHosts = new ArrayList<>();
-    for (String host : blocksDistribution.getTopHosts()) {
-      if (host.equals(currentHost)) {
-        break;
-      }
-      topHosts.add(host);
-    }
-    return mapHostNameToServerName(topHosts);
+    List<String> topHosts = blocksDistribution.getTopHosts();
+    int toIndex = topHosts.indexOf(currentHost);
+    List<String> subTopHosts = (toIndex < 0) ? topHosts : topHosts.subList(0, toIndex);
+    return mapHostNameToServerName(subTopHosts);
   }
 
   /**
@@ -211,7 +212,7 @@ class RegionLocationFinder {
*
* @param tableName the table name
* @return TableDescriptor
-   * @throws IOException
+   * @throws IOException if table descriptor cannot be loaded
*/
   protected TableDescriptor getTableDescriptor(TableName tableName) throws 
IOException {
 TableDescriptor tableDescriptor = null;
@@ -220,8 +221,8 @@ class RegionLocationFinder {
 tableDescriptor = this.services.getTableDescriptors().get(tableName);
   }
 } catch (FileNotFoundException fnfe) {
-  LOG.debug("FileNotFoundException during getTableDescriptors." + " 
Current table name = "
-  + tableName, fnfe);
+  LOG.debug("FileNotFoundException during getTableDescriptors. Current 
table name =  {}",
+  tableName, fnfe);
 }
 
 return tableDescriptor;
@@ -235,60 +236,36 @@ class RegionLocationFinder {
* @return ServerName list
*/
   protected List mapHostNameToServerName(List hosts) {
-if (hosts == null || stat

[3/3] hbase git commit: HBASE-20186 Improve RSGroupBasedLoadBalancer#balanceCluster() to be more efficient when calculating cluster state for each rsgroup

2018-03-16 Thread apurtell
HBASE-20186 Improve RSGroupBasedLoadBalancer#balanceCluster() to be more 
efficient when calculating cluster state for each rsgroup

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/03e7b782
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/03e7b782
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/03e7b782

Branch: refs/heads/branch-2
Commit: 03e7b78260413ebc508b3415dbe9c14be6a361de
Parents: 3f1c867
Author: Xiang LI 
Authored: Wed Mar 14 01:08:26 2018 +0800
Committer: Andrew Purtell 
Committed: Fri Mar 16 18:01:01 2018 -0700

--
 .../hbase/rsgroup/RSGroupBasedLoadBalancer.java | 20 +---
 1 file changed, 13 insertions(+), 7 deletions(-)
--
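
For context on the diff below: instead of re-scanning the cluster map once per server
address per rsgroup, the balancer now walks the server map once per rsgroup and records
servers it has already claimed, so later groups skip them. A self-contained sketch of
that bookkeeping pattern (all names and sample data are illustrative):

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class GroupPartitionSketch {
  public static void main(String[] args) {
    Map<String, List<String>> regionsByServer = new HashMap<>();
    regionsByServer.put("rs1", Arrays.asList("r1", "r2"));
    regionsByServer.put("rs2", Arrays.asList("r3"));
    regionsByServer.put("rs3", Arrays.asList("r4", "r5"));

    Map<String, Set<String>> groups = new HashMap<>();   // rsgroup name -> member servers
    groups.put("groupA", new HashSet<>(Arrays.asList("rs1", "rs3")));
    groups.put("groupB", new HashSet<>(Arrays.asList("rs2")));

    Set<String> processedServers = new HashSet<>();
    for (Map.Entry<String, Set<String>> group : groups.entrySet()) {
      Map<String, List<String>> groupClusterState = new HashMap<>();
      for (String server : regionsByServer.keySet()) {
        if (!processedServers.contains(server) && group.getValue().contains(server)) {
          groupClusterState.put(server, regionsByServer.get(server));
          processedServers.add(server);   // never re-examined for later groups
        }
      }
      // A per-group balancer would be invoked with groupClusterState here.
      System.out.println(group.getKey() + " -> " + groupClusterState.keySet());
    }
  }
}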


http://git-wip-us.apache.org/repos/asf/hbase/blob/03e7b782/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
index 392cbab..3182a61 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
@@ -127,17 +127,23 @@ public class RSGroupBasedLoadBalancer implements 
RSGroupableBalancer {
   regionPlans.add(new RegionPlan(regionInfo, serverName, null));
 }
 try {
-  List rsgi = rsGroupInfoManager.listRSGroups();
-  for (RSGroupInfo info: rsgi) {
+  // Record which region servers have been processed, so as to skip them once processed
+  HashSet processedServers = new HashSet<>();
+
+  // For each rsgroup
+  for (RSGroupInfo rsgroup : rsGroupInfoManager.listRSGroups()) {
 Map> groupClusterState = new HashMap<>();
 Map>> groupClusterLoad = 
new HashMap<>();
-for (Address sName : info.getServers()) {
-  for(ServerName curr: clusterState.keySet()) {
-if(curr.getAddress().equals(sName)) {
-  groupClusterState.put(curr, correctedState.get(curr));
-}
+for (ServerName server : clusterState.keySet()) { // for each region 
server
+  if (!processedServers.contains(server) // server is not processed yet
+  && rsgroup.containsServer(server.getAddress())) { // server 
belongs to this rsgroup
+List regionsOnServer = correctedState.get(server);
+groupClusterState.put(server, regionsOnServer);
+
+processedServers.add(server);
   }
 }
+
 groupClusterLoad.put(HConstants.ENSEMBLE_TABLE_NAME, 
groupClusterState);
 this.internalBalancer.setClusterLoad(groupClusterLoad);
 List groupPlans = this.internalBalancer



[2/3] hbase git commit: HBASE-20186 Improve RSGroupBasedLoadBalancer#balanceCluster() to be more efficient when calculating cluster state for each rsgroup

2018-03-16 Thread apurtell
HBASE-20186 Improve RSGroupBasedLoadBalancer#balanceCluster() to be more 
efficient when calculating cluster state for each rsgroup

Signed-off-by: tedyu 

Conflicts:

hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9fb473c0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9fb473c0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9fb473c0

Branch: refs/heads/branch-1.4
Commit: 9fb473c0e0f223c7085046edeab6f3ec28d67c49
Parents: 51a35aa
Author: Xiang LI 
Authored: Wed Mar 14 01:08:26 2018 +0800
Committer: Andrew Purtell 
Committed: Fri Mar 16 18:00:59 2018 -0700

--
 .../hbase/rsgroup/RSGroupBasedLoadBalancer.java | 21 
 1 file changed, 13 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9fb473c0/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
index 529a7e6..9f8f427 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
@@ -135,16 +135,21 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalancer {
       regionPlans.add(new RegionPlan(regionInfo, null, null));
     }
     try {
-      for (RSGroupInfo info : infoManager.listRSGroups()) {
-        Map<ServerName, List<HRegionInfo>> groupClusterState =
-            new HashMap<ServerName, List<HRegionInfo>>();
-        for (Address addr : info.getServers()) {
-          for(ServerName curr: clusterState.keySet()) {
-            if(curr.getAddress().equals(addr)) {
-              groupClusterState.put(curr, correctedState.get(curr));
-            }
+      // Record which region servers have been processed, so as to skip them after processed
+      HashSet<ServerName> processedServers = new HashSet<>();
+
+      // For each rsgroup
+      for (RSGroupInfo rsgroup : infoManager.listRSGroups()) {
+        Map<ServerName, List<HRegionInfo>> groupClusterState = new HashMap<>();
+        for (ServerName server : clusterState.keySet()) { // for each region server
+          if (!processedServers.contains(server) // server is not processed yet
+              && rsgroup.containsServer(server.getAddress())) { // server belongs to this rsgroup
+            List<HRegionInfo> regionsOnServer = correctedState.get(server);
+            groupClusterState.put(server, regionsOnServer);
+            processedServers.add(server);
           }
         }
+
         List<RegionPlan> groupPlans = this.internalBalancer
             .balanceCluster(groupClusterState);
         if (groupPlans != null) {



[1/3] hbase git commit: HBASE-20186 Improve RSGroupBasedLoadBalancer#balanceCluster() to be more efficient when calculating cluster state for each rsgroup

2018-03-16 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 009295a3a -> 764798d99
  refs/heads/branch-1.4 51a35aafc -> 9fb473c0e
  refs/heads/branch-2 3f1c86786 -> 03e7b7826


HBASE-20186 Improve RSGroupBasedLoadBalancer#balanceCluster() to be more 
efficient when calculating cluster state for each rsgroup

Signed-off-by: tedyu 

Conflicts:

hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/764798d9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/764798d9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/764798d9

Branch: refs/heads/branch-1
Commit: 764798d99603894c9f033d198ea1215f6fa716c5
Parents: 009295a
Author: Xiang LI 
Authored: Wed Mar 14 01:08:26 2018 +0800
Committer: Andrew Purtell 
Committed: Fri Mar 16 18:00:52 2018 -0700

--
 .../hbase/rsgroup/RSGroupBasedLoadBalancer.java | 21 
 1 file changed, 13 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/764798d9/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
--
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
index 529a7e6..9f8f427 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupBasedLoadBalancer.java
@@ -135,16 +135,21 @@ public class RSGroupBasedLoadBalancer implements RSGroupableBalancer, LoadBalancer {
       regionPlans.add(new RegionPlan(regionInfo, null, null));
     }
     try {
-      for (RSGroupInfo info : infoManager.listRSGroups()) {
-        Map<ServerName, List<HRegionInfo>> groupClusterState =
-            new HashMap<ServerName, List<HRegionInfo>>();
-        for (Address addr : info.getServers()) {
-          for(ServerName curr: clusterState.keySet()) {
-            if(curr.getAddress().equals(addr)) {
-              groupClusterState.put(curr, correctedState.get(curr));
-            }
+      // Record which region servers have been processed, so as to skip them after processed
+      HashSet<ServerName> processedServers = new HashSet<>();
+
+      // For each rsgroup
+      for (RSGroupInfo rsgroup : infoManager.listRSGroups()) {
+        Map<ServerName, List<HRegionInfo>> groupClusterState = new HashMap<>();
+        for (ServerName server : clusterState.keySet()) { // for each region server
+          if (!processedServers.contains(server) // server is not processed yet
+              && rsgroup.containsServer(server.getAddress())) { // server belongs to this rsgroup
+            List<HRegionInfo> regionsOnServer = correctedState.get(server);
+            groupClusterState.put(server, regionsOnServer);
+            processedServers.add(server);
           }
         }
+
         List<RegionPlan> groupPlans = this.internalBalancer
             .balanceCluster(groupClusterState);
         if (groupPlans != null) {