[34/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CopyTable.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CopyTable.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CopyTable.html
index 9e00828..027a68b 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CopyTable.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/CopyTable.html
@@ -168,7 +168,7 @@
 160  try (Connection conn = 
ConnectionFactory.createConnection(getConf());
 161  Admin admin = conn.getAdmin()) 
{
 162
HFileOutputFormat2.configureIncrementalLoadMap(job,
-163
admin.getTableDescriptor((TableName.valueOf(dstTableName;
+163
admin.listTableDescriptor((TableName.valueOf(dstTableName;
 164  }
 165} else {
 166  
TableMapReduceUtil.initTableMapperJob(tableName, scan,



[04/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
index 7051a7b..775b4c1 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/ColumnFamilyDescriptor.html
@@ -99,6 +99,29 @@
  BlockCache.
 
 
+
+org.apache.hadoop.hbase.mapreduce
+
+Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce;>MapReduce
+Input/OutputFormats, a table indexing MapReduce job, and utility methods.
+
+
+
+org.apache.hadoop.hbase.mob
+
+
+
+org.apache.hadoop.hbase.regionserver
+
+
+
+org.apache.hadoop.hbase.security
+
+
+
+org.apache.hadoop.hbase.tool
+
+
 
 
 
@@ -393,6 +416,288 @@
 
 
 
+
+
+
+Uses of ColumnFamilyDescriptor 
in org.apache.hadoop.hbase.mapreduce
+
+Fields in org.apache.hadoop.hbase.mapreduce
 with type parameters of type ColumnFamilyDescriptor
+
+Modifier and Type
+Field and Description
+
+
+
+(package private) static http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in java.util.function">FunctionColumnFamilyDescriptor,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+HFileOutputFormat2.blockSizeDetails
+Serialize column family to block size map to 
configuration.
+
+
+
+(package private) static http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in java.util.function">FunctionColumnFamilyDescriptor,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+HFileOutputFormat2.bloomTypeDetails
+Serialize column family to bloom type map to 
configuration.
+
+
+
+(package private) static http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in java.util.function">FunctionColumnFamilyDescriptor,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+HFileOutputFormat2.compressionDetails
+Serialize column family to compression algorithm map to 
configuration.
+
+
+
+(package private) static http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in java.util.function">FunctionColumnFamilyDescriptor,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+HFileOutputFormat2.dataBlockEncodingDetails
+Serialize column family to data block encoding map to 
configuration.
+
+
+
+
+
+Method parameters in org.apache.hadoop.hbase.mapreduce
 with type arguments of type ColumnFamilyDescriptor
+
+Modifier and Type
+Method and Description
+
+
+
+(package private) static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+HFileOutputFormat2.serializeColumnFamilyAttribute(http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in java.util.function">FunctionColumnFamilyDescriptor,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringfn,
+  http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptorallTables)
+
+
+
+
+
+
+
+Uses of ColumnFamilyDescriptor 
in org.apache.hadoop.hbase.mob
+
+Methods in org.apache.hadoop.hbase.mob
 with parameters of type ColumnFamilyDescriptor
+
+Modifier and Type
+Method and Description
+
+
+
+static StoreFileWriter
+MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf,
+org.apache.hadoop.fs.FileSystemfs,
+ColumnFamilyDescriptorfamily,
+org.apache.hadoop.fs.Pathpath,
+longmaxKeyCount,
+Compression.Algorithmcompression,
+CacheConfigcacheConfig,
+Encryption.ContextcryptoContext,
+ChecksumTypechecksumType,
+intbytesPerChecksum,
+intblocksize,
+BloomTypebloomType,
+booleanisCompaction)
+Creates a writer for the mob file in temp directory.
+
+
+
+
+
+Constructors in org.apache.hadoop.hbase.mob
 with parameters of type ColumnFamilyDescriptor
+
+Constructor and Description
+
+
+
+MobCacheConfig(org.apache.hadoop.conf.Configurationconf,
+  ColumnFamilyDescriptorfamily)
+
+
+
+
+
+
+
+Uses of ColumnFamilyDescriptor 
in 

[51/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/9fb0764b
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/9fb0764b
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/9fb0764b

Branch: refs/heads/asf-site
Commit: 9fb0764ba186251e65f53971c482d3dcbe9a5213
Parents: 2250520
Author: jenkins 
Authored: Sat Jul 8 15:01:30 2017 +
Committer: jenkins 
Committed: Sat Jul 8 15:01:30 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf|  9645 
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/deprecated-list.html|   144 +
 apidocs/index-all.html  |   223 +-
 .../apache/hadoop/hbase/HTableDescriptor.html   |   185 +-
 .../hbase/class-use/HTableDescriptor.html   |   165 +-
 .../hadoop/hbase/class-use/TableName.html   |92 +-
 .../hbase/class-use/TableNotFoundException.html | 9 +
 .../org/apache/hadoop/hbase/client/Admin.html   |  1355 +-
 .../apache/hadoop/hbase/client/AsyncAdmin.html  |   550 +-
 .../org/apache/hadoop/hbase/client/Table.html   |   157 +-
 .../hadoop/hbase/client/TableDescriptor.html|62 +-
 .../hbase/client/class-use/RegionLocator.html   | 4 +-
 .../hbase/client/class-use/TableDescriptor.html |   156 +-
 .../apache/hadoop/hbase/client/package-use.html |16 +
 .../hbase/mapreduce/HFileOutputFormat2.html |44 +-
 .../hadoop/hbase/mapreduce/ImportTsv.html   |40 +-
 .../hbase/mapreduce/LoadIncrementalHFiles.html  |50 +-
 .../mapreduce/MultiTableHFileOutputFormat.html  |22 +-
 .../org/apache/hadoop/hbase/package-use.html|10 +-
 .../hadoop/hbase/rest/client/RemoteHTable.html  |   180 +-
 .../apache/hadoop/hbase/HTableDescriptor.html   |  1342 +-
 .../org/apache/hadoop/hbase/client/Admin.html   |  4046 ++--
 .../apache/hadoop/hbase/client/AsyncAdmin.html  |  1217 +-
 .../org/apache/hadoop/hbase/client/Table.html   |  1177 +-
 .../hadoop/hbase/client/TableDescriptor.html|   122 +-
 .../hbase/client/TableDescriptorBuilder.html|  1915 +-
 .../hadoop/hbase/mapreduce/CopyTable.html   | 2 +-
 .../hbase/mapreduce/HFileOutputFormat2.html |  1714 +-
 .../apache/hadoop/hbase/mapreduce/Import.html   | 8 +-
 .../hadoop/hbase/mapreduce/ImportTsv.html   |  1416 +-
 .../hbase/mapreduce/LoadIncrementalHFiles.html  |  2477 +--
 .../mapreduce/MultiTableHFileOutputFormat.html  |   199 +-
 .../hadoop/hbase/mapreduce/WALPlayer.html   | 2 +-
 .../hadoop/hbase/rest/client/RemoteHTable.html  |  1646 +-
 book.html   |79 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 19494 +
 checkstyle.rss  |62 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/constant-values.html | 6 +-
 devapidocs/deprecated-list.html |   158 +-
 devapidocs/index-all.html   |   413 +-
 .../apache/hadoop/hbase/HTableDescriptor.html   |   185 +-
 .../hadoop/hbase/backup/package-tree.html   | 2 +-
 .../hadoop/hbase/class-use/CellComparator.html  | 4 +-
 .../hbase/class-use/HColumnDescriptor.html  |   281 +-
 .../hbase/class-use/HDFSBlocksDistribution.html | 8 +-
 .../hadoop/hbase/class-use/HRegionInfo.html |96 +-
 .../hbase/class-use/HTableDescriptor.html   |   493 +-
 .../hadoop/hbase/class-use/ServerName.html  |40 +-
 .../hadoop/hbase/class-use/TableName.html   |   399 +-
 .../hbase/class-use/TableNotFoundException.html |13 +
 .../hbase/classification/package-tree.html  | 6 +-
 .../org/apache/hadoop/hbase/client/Admin.html   |  1355 +-
 .../apache/hadoop/hbase/client/AsyncAdmin.html  |   550 +-
 .../hadoop/hbase/client/AsyncHBaseAdmin.html|   509 +-
 .../client/HBaseAdmin.AbortProcedureFuture.html | 8 +-
 .../HBaseAdmin.AddColumnFamilyFuture.html   | 6 +-
 .../client/HBaseAdmin.CreateTableFuture.html|26 +-
 .../HBaseAdmin.DeleteColumnFamilyFuture.html| 6 +-
 .../client/HBaseAdmin.DeleteTableFuture.html|10 +-
 .../client/HBaseAdmin.DisableTableFuture.html   | 8 +-
 .../client/HBaseAdmin.EnableTableFuture.html| 8 +-
 .../HBaseAdmin.MergeTableRegionsFuture.html | 8 +-
 .../HBaseAdmin.ModifyColumnFamilyFuture.html| 

[12/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
index 5e4e122..61cb4f0 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ProcedureFuture.html
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Private
  @InterfaceStability.Evolving
-protected static class HBaseAdmin.ProcedureFutureV
+protected static class HBaseAdmin.ProcedureFutureV
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true;
 title="class or interface in java.util.concurrent">FutureV
 Future that waits on a procedure result.
@@ -328,7 +328,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 exception
-privatehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutionException.html?is-external=true;
 title="class or interface in java.util.concurrent">ExecutionException exception
+privatehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutionException.html?is-external=true;
 title="class or interface in java.util.concurrent">ExecutionException exception
 
 
 
@@ -337,7 +337,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 procResultFound
-privateboolean procResultFound
+privateboolean procResultFound
 
 
 
@@ -346,7 +346,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 done
-privateboolean done
+privateboolean done
 
 
 
@@ -355,7 +355,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 cancelled
-privateboolean cancelled
+privateboolean cancelled
 
 
 
@@ -364,7 +364,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 result
-privateV result
+privateV result
 
 
 
@@ -373,7 +373,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 admin
-private finalHBaseAdmin admin
+private finalHBaseAdmin admin
 
 
 
@@ -382,7 +382,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 procId
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long procId
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long procId
 
 
 
@@ -399,7 +399,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 ProcedureFuture
-publicProcedureFuture(HBaseAdminadmin,
+publicProcedureFuture(HBaseAdminadmin,
http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">LongprocId)
 
 
@@ -417,7 +417,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 cancel
-publicbooleancancel(booleanmayInterruptIfRunning)
+publicbooleancancel(booleanmayInterruptIfRunning)
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true#cancel-boolean-;
 title="class or interface in java.util.concurrent">cancelin 
interfacehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true;
 title="class or interface in java.util.concurrent">FutureV
@@ -430,7 +430,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 isCancelled
-publicbooleanisCancelled()
+publicbooleanisCancelled()
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true#isCancelled--;
 title="class or interface in 
java.util.concurrent">isCancelledin interfacehttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true;
 title="class or interface in java.util.concurrent">FutureV
@@ -443,7 +443,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 abortProcedureResult
-protectedorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponseabortProcedureResult(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequestrequest)
+protectedorg.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponseabortProcedureResult(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequestrequest)

   throws 

[13/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
index 49fff53..49f7926 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.AbortProcedureFuture.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HBaseAdmin.AbortProcedureFuture
+private static class HBaseAdmin.AbortProcedureFuture
 extends HBaseAdmin.ProcedureFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
 
 
@@ -235,7 +235,7 @@ extends 
 
 isAbortInProgress
-privateboolean isAbortInProgress
+privateboolean isAbortInProgress
 
 
 
@@ -252,7 +252,7 @@ extends 
 
 AbortProcedureFuture
-publicAbortProcedureFuture(HBaseAdminadmin,
+publicAbortProcedureFuture(HBaseAdminadmin,
 http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">LongprocId,
 http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in 
java.lang">BooleanabortProcResponse)
 
@@ -271,7 +271,7 @@ extends 
 
 get
-publichttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Booleanget(longtimeout,
+publichttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Booleanget(longtimeout,
http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeUnit.html?is-external=true;
 title="class or interface in java.util.concurrent">TimeUnitunit)
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException,
http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutionException.html?is-external=true;
 title="class or interface in java.util.concurrent">ExecutionException,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.AddColumnFamilyFuture.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.AddColumnFamilyFuture.html
 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.AddColumnFamilyFuture.html
index dc54cdc..dd16dde 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.AddColumnFamilyFuture.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.AddColumnFamilyFuture.html
@@ -132,7 +132,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HBaseAdmin.AddColumnFamilyFuture
+private static class HBaseAdmin.AddColumnFamilyFuture
 extends HBaseAdmin.ModifyTableFuture
 
 
@@ -246,7 +246,7 @@ extends 
 
 AddColumnFamilyFuture
-publicAddColumnFamilyFuture(HBaseAdminadmin,
+publicAddColumnFamilyFuture(HBaseAdminadmin,
  TableNametableName,
  
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponseresponse)
 
@@ -265,7 +265,7 @@ extends 
 
 getOperationType
-publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetOperationType()
+publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetOperationType()
 
 Overrides:
 getOperationTypein
 classHBaseAdmin.ModifyTableFuture

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
index 51e7fc6..e6dd218 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.CreateTableFuture.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HBaseAdmin.CreateTableFuture
+private static class HBaseAdmin.CreateTableFuture
 extends HBaseAdmin.TableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
 
 
@@ -170,7 +170,7 @@ extends Field and Description
 
 
-private HTableDescriptor
+private 

[11/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
index 935e2a5..6386f5b 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":42,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10,"i14":41,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":42,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":9,"i79":10,"i80":10,"i81":9,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109
 
":10,"i110":9,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":42,"i165":10,"i166":10,"i167":10,"i168":42,"i169":10,"i170":10,"i171":10,"i172":10,"i173":10,"i174":10,"i175":10,"i176":10,"i177":10,"i178":10,"i179":10,"i180":10,"i181":10,"i182":10,"i183":10,"i184":10,"i185":10,"i186":10,"i187":10,"i188":42,"i189":10,"i190":10,"i191":10,"i192":10,"i193":10,"i194":10,"i195":10,"i196":10,"i197":10,"i198":10,"i199":10,"i200":10,"i201":10,"i202":10,"i203":10,"i204":10,"i205":10,"i206":10,"i207":10,"i208":10,"i2
 
09":10,"i210":10,"i211":10,"i212":10,"i213":10,"i214":10,"i215":10,"i216":10,"i217":10,"i218":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":42,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":10,"i13":10,"i14":41,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":42,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":9,"i79":10,"i80":10,"i81":9,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":41,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i109
 
":10,"i110":10,"i111":9,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10,"i119":10,"i120":10,"i121":10,"i122":10,"i123":10,"i124":10,"i125":10,"i126":10,"i127":10,"i128":10,"i129":10,"i130":10,"i131":10,"i132":10,"i133":10,"i134":10,"i135":10,"i136":10,"i137":10,"i138":10,"i139":10,"i140":10,"i141":10,"i142":10,"i143":10,"i144":10,"i145":10,"i146":10,"i147":10,"i148":10,"i149":10,"i150":10,"i151":10,"i152":10,"i153":10,"i154":10,"i155":10,"i156":10,"i157":10,"i158":10,"i159":10,"i160":10,"i161":10,"i162":10,"i163":10,"i164":10,"i165":10,"i166":10,"i167":10,"i168":10,"i169":10,"i170":10,"i171":10,"i172":10,"i173":42,"i174":10,"i175":10,"i176":10,"i177":42,"i178":10,"i179":10,"i180":10,"i181":10,"i182":10,"i183":10,"i184":10,"i185":10,"i186":10,"i187":10,"i188":10,"i189":10,"i190":10,"i191":10,"i192":10,"i193":10,"i194":10,"i195":10,"i196":10,"i197":10,"i198":10,"i199":42,"i200":10,"i201":10,"i202":10,"i203":10,"i204":10,"i205":10,"i206":10,"i207":10,"i208":10,"i2
 
09":10,"i210":10,"i211":10,"i212":10,"i213":10,"i214":10,"i215":10,"i216":10,"i217":10,"i218":10,"i219":10,"i220":10,"i221":10,"i222":10,"i223":10,"i224":10,"i225":10,"i226":10,"i227":10,"i228":10,"i229":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated 

[38/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
index 8abc3e6..09921d4 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -292,614 +292,677 @@
 284  
CompletableFutureListNamespaceDescriptor 
listNamespaceDescriptors();
 285
 286  /**
-287   * Turn the load balancer on or off.
-288   * @param on
-289   * @return Previous balancer value 
wrapped by a {@link CompletableFuture}.
-290   */
-291  CompletableFutureBoolean 
setBalancerOn(boolean on);
-292
-293  /**
-294   * Invoke the balancer. Will run the 
balancer and if regions to move, it will go ahead and do the
-295   * reassignments. Can NOT run for 
various reasons. Check logs.
-296   * @return True if balancer ran, false 
otherwise. The return value will be wrapped by a
-297   * {@link CompletableFuture}.
-298   */
-299  default 
CompletableFutureBoolean balance() {
-300return balance(false);
-301  }
+287   * Close a region. For expert-admins 
Runs close on the regionserver. The master will not be
+288   * informed of the close.
+289   * @param regionName region name to 
close
+290   * @param serverName The servername of 
the regionserver. If not present, we will use servername
+291   *  found in the hbase:meta 
table. A server name is made of host, port and startcode. Here
+292   *  is an example: code 
host187.example.com,60020,1289493121758/code
+293   * @return true if the region was 
closed, false if not. The return value will be wrapped by a
+294   * {@link CompletableFuture}.
+295   */
+296  CompletableFutureBoolean 
closeRegion(byte[] regionName, OptionalServerName serverName);
+297
+298  /**
+299   * Get all the online regions on a 
region server.
+300   */
+301  
CompletableFutureListHRegionInfo getOnlineRegions(ServerName 
serverName);
 302
 303  /**
-304   * Invoke the balancer. Will run the 
balancer and if regions to move, it will go ahead and do the
-305   * reassignments. If there is region in 
transition, force parameter of true would still run
-306   * balancer. Can *not* run for other 
reasons. Check logs.
-307   * @param forcible whether we should 
force balance even if there is region in transition.
-308   * @return True if balancer ran, false 
otherwise. The return value will be wrapped by a
-309   * {@link CompletableFuture}.
-310   */
-311  CompletableFutureBoolean 
balance(boolean forcible);
-312
-313  /**
-314   * Query the current state of the 
balancer.
-315   * @return true if the balance switch 
is on, false otherwise The return value will be wrapped by a
-316   * {@link CompletableFuture}.
+304   * Get the regions of a given table.
+305   */
+306  
CompletableFutureListHRegionInfo getTableRegions(TableName 
tableName);
+307
+308  /**
+309   * Flush a table.
+310   * @param tableName table to flush
+311   */
+312  CompletableFutureVoid 
flush(TableName tableName);
+313
+314  /**
+315   * Flush an individual region.
+316   * @param regionName region to flush
 317   */
-318  CompletableFutureBoolean 
isBalancerOn();
+318  CompletableFutureVoid 
flushRegion(byte[] regionName);
 319
 320  /**
-321   * Close a region. For expert-admins 
Runs close on the regionserver. The master will not be
-322   * informed of the close.
-323   * @param regionName region name to 
close
-324   * @param serverName The servername of 
the regionserver. If not present, we will use servername
-325   *  found in the hbase:meta 
table. A server name is made of host, port and startcode. Here
-326   *  is an example: code 
host187.example.com,60020,1289493121758/code
-327   * @return true if the region was 
closed, false if not. The return value will be wrapped by a
-328   * {@link CompletableFuture}.
-329   */
-330  CompletableFutureBoolean 
closeRegion(byte[] regionName, OptionalServerName serverName);
-331
-332  /**
-333   * Get all the online regions on a 
region server.
-334   */
-335  
CompletableFutureListHRegionInfo getOnlineRegions(ServerName 
serverName);
-336
-337  /**
-338   * Get the regions of a given table.
-339   */
-340  
CompletableFutureListHRegionInfo getTableRegions(TableName 
tableName);
-341
-342  /**
-343   * Flush a table.
-344   * @param tableName table to flush
-345   */
-346  CompletableFutureVoid 
flush(TableName tableName);
+321   * Compact a table. When the returned 
CompletableFuture is done, it only means the compact request
+322   * was sent to HBase and may need some 
time to finish the compact operation.
+323   * @param tableName table to compact
+324   */
+325  default CompletableFutureVoid 
compact(TableName tableName) {
+326return compact(tableName, 

[32/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
index 290ffe3..986726c 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
@@ -238,7 +238,7 @@
 230  int reduceNum = 
context.getNumReduceTasks();
 231  Configuration conf = 
context.getConfiguration();
 232  TableName tableName = 
TableName.valueOf(context.getConfiguration().get(TABLE_NAME));
-233  try (Connection conn = 
ConnectionFactory.createConnection(conf); 
+233  try (Connection conn = 
ConnectionFactory.createConnection(conf);
 234  RegionLocator regionLocator = 
conn.getRegionLocator(tableName)) {
 235byte[][] startKeys = 
regionLocator.getStartKeys();
 236if (startKeys.length != 
reduceNum) {
@@ -630,10 +630,10 @@
 622
 623if (hfileOutPath != null  
conf.getBoolean(HAS_LARGE_RESULT, false)) {
 624  LOG.info("Use Large Result!!");
-625  try (Connection conn = 
ConnectionFactory.createConnection(conf); 
+625  try (Connection conn = 
ConnectionFactory.createConnection(conf);
 626  Table table = 
conn.getTable(tableName);
 627  RegionLocator regionLocator = 
conn.getRegionLocator(tableName)) {
-628
HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(), 
regionLocator);
+628
HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(), 
regionLocator);
 629
job.setMapperClass(KeyValueSortImporter.class);
 630
job.setReducerClass(KeyValueReducer.class);
 631Path outputDir = new 
Path(hfileOutPath);
@@ -663,7 +663,7 @@
 655
FileOutputFormat.setOutputPath(job, outputDir);
 656
job.setMapOutputKeyClass(ImmutableBytesWritable.class);
 657
job.setMapOutputValueClass(KeyValue.class);
-658
HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(), 
regionLocator);
+658
HFileOutputFormat2.configureIncrementalLoad(job, table.getDescriptor(), 
regionLocator);
 659
TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
 660
com.google.common.base.Preconditions.class);
 661  }



[06/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/Table.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Table.html 
b/devapidocs/org/apache/hadoop/hbase/client/Table.html
index a76bcee..201b071 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Table.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Table.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":38,"i25":6,"i26":6,"i27":6,"i28":6,"i29":38,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":38,"i38":38,"i39":38,"i40":38,"i41":38};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":38,"i26":6,"i27":6,"i28":6,"i29":38,"i30":38,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":38,"i39":38,"i40":38,"i41":38,"i42":38};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -327,24 +327,30 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
+TableDescriptor
+getDescriptor()
+Gets the table 
descriptor for this table.
+
+
+
 TableName
 getName()
 Gets the fully qualified table name instance of this 
table.
 
 
-
+
 int
 getOperationTimeout()
 Get timeout (millisecond) of each operation for in Table 
instance.
 
 
-
+
 int
 getReadRpcTimeout()
 Get timeout (millisecond) of each rpc read request in this 
Table instance.
 
 
-
+
 int
 getRpcTimeout()
 Deprecated.
@@ -352,33 +358,36 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 ResultScanner
 getScanner(byte[]family)
 Gets a scanner on the current table for the given 
family.
 
 
-
+
 ResultScanner
 getScanner(byte[]family,
   byte[]qualifier)
 Gets a scanner on the current table for the given family 
and qualifier.
 
 
-
+
 ResultScanner
 getScanner(Scanscan)
 Returns a scanner on the current table as specified by the 
Scan
  object.
 
 
-
+
 HTableDescriptor
 getTableDescriptor()
-Gets the table descriptor for 
this table.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use getDescriptor()
+
 
 
-
+
 long
 getWriteBufferSize()
 Deprecated.
@@ -386,19 +395,19 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 int
 getWriteRpcTimeout()
 Get timeout (millisecond) of each rpc write request in this 
Table instance.
 
 
-
+
 Result
 increment(Incrementincrement)
 Increments one or more columns within a single row.
 
 
-
+
 long
 incrementColumnValue(byte[]row,
 byte[]family,
@@ -407,7 +416,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 See incrementColumnValue(byte[],
 byte[], byte[], long, Durability)
 
 
-
+
 long
 incrementColumnValue(byte[]row,
 byte[]family,
@@ -417,25 +426,25 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 Atomically increments a column value.
 
 
-
+
 void
 mutateRow(RowMutationsrm)
 Performs multiple mutations atomically on a single 
row.
 
 
-
+
 void
 put(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListPutputs)
 Puts some data in the table, in batch.
 
 
-
+
 void
 put(Putput)
 Puts some data in the table.
 
 
-
+
 void
 setOperationTimeout(intoperationTimeout)
 Deprecated.
@@ -443,7 +452,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 void
 setReadRpcTimeout(intreadRpcTimeout)
 Deprecated.
@@ -451,7 +460,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 void
 setRpcTimeout(intrpcTimeout)
 Deprecated.
@@ -459,7 +468,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 void
 setWriteBufferSize(longwriteBufferSize)
 Deprecated.
@@ -468,7 +477,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 void
 setWriteRpcTimeout(intwriteRpcTimeout)
 Deprecated.
@@ -520,8 +529,11 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 getTableDescriptor
-HTableDescriptorgetTableDescriptor()
- throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
+HTableDescriptorgetTableDescriptor()
+

[33/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
index 885e6f8..cd44020 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
@@ -33,871 +33,881 @@
 025import 
java.nio.charset.StandardCharsets;
 026import java.util.ArrayList;
 027import java.util.Arrays;
-028import java.util.Collection;
-029import java.util.List;
-030import java.util.Map;
-031import java.util.Set;
-032import java.util.TreeMap;
-033import java.util.TreeSet;
-034import java.util.UUID;
-035import java.util.function.Function;
-036import java.util.stream.Collectors;
-037
-038import 
org.apache.commons.lang.StringUtils;
-039import org.apache.commons.logging.Log;
-040import 
org.apache.commons.logging.LogFactory;
-041import 
org.apache.hadoop.conf.Configuration;
-042import org.apache.hadoop.fs.FileSystem;
-043import org.apache.hadoop.fs.Path;
-044import org.apache.hadoop.hbase.Cell;
-045import 
org.apache.hadoop.hbase.CellComparator;
-046import 
org.apache.hadoop.hbase.CellUtil;
-047import 
org.apache.hadoop.hbase.HColumnDescriptor;
-048import 
org.apache.hadoop.hbase.HConstants;
-049import 
org.apache.hadoop.hbase.HRegionLocation;
-050import 
org.apache.hadoop.hbase.HTableDescriptor;
-051import 
org.apache.hadoop.hbase.KeyValue;
-052import 
org.apache.hadoop.hbase.KeyValueUtil;
-053import 
org.apache.hadoop.hbase.TableName;
-054import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-055import 
org.apache.hadoop.hbase.client.Connection;
-056import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-057import 
org.apache.hadoop.hbase.client.Put;
-058import 
org.apache.hadoop.hbase.client.RegionLocator;
-059import 
org.apache.hadoop.hbase.client.Table;
-060import 
org.apache.hadoop.hbase.fs.HFileSystem;
-061import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-062import 
org.apache.hadoop.hbase.io.compress.Compression;
-063import 
org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
-064import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-065import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-066import 
org.apache.hadoop.hbase.io.hfile.HFile;
-067import 
org.apache.hadoop.hbase.io.hfile.HFileContext;
-068import 
org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
-069import 
org.apache.hadoop.hbase.io.hfile.HFileWriterImpl;
+028import java.util.List;
+029import java.util.Map;
+030import java.util.Set;
+031import java.util.TreeMap;
+032import java.util.TreeSet;
+033import java.util.UUID;
+034import java.util.function.Function;
+035import java.util.stream.Collectors;
+036
+037import 
org.apache.commons.lang.StringUtils;
+038import org.apache.commons.logging.Log;
+039import 
org.apache.commons.logging.LogFactory;
+040import 
org.apache.hadoop.conf.Configuration;
+041import org.apache.hadoop.fs.FileSystem;
+042import org.apache.hadoop.fs.Path;
+043import org.apache.hadoop.hbase.Cell;
+044import 
org.apache.hadoop.hbase.CellComparator;
+045import 
org.apache.hadoop.hbase.CellUtil;
+046import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+047import 
org.apache.hadoop.hbase.client.TableDescriptor;
+048import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+049import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+050import 
org.apache.hadoop.hbase.client.Connection;
+051import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+052import 
org.apache.hadoop.hbase.client.Put;
+053import 
org.apache.hadoop.hbase.client.RegionLocator;
+054import 
org.apache.hadoop.hbase.client.Table;
+055import 
org.apache.hadoop.hbase.fs.HFileSystem;
+056import 
org.apache.hadoop.hbase.HConstants;
+057import 
org.apache.hadoop.hbase.HRegionLocation;
+058import 
org.apache.hadoop.hbase.HTableDescriptor;
+059import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+060import 
org.apache.hadoop.hbase.io.compress.Compression;
+061import 
org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
+062import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+063import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
+064import 
org.apache.hadoop.hbase.io.hfile.HFile;
+065import 
org.apache.hadoop.hbase.io.hfile.HFileContext;
+066import 
org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+067import 
org.apache.hadoop.hbase.io.hfile.HFileWriterImpl;
+068import 
org.apache.hadoop.hbase.KeyValue;
+069import 
org.apache.hadoop.hbase.KeyValueUtil;
 070import 
org.apache.hadoop.hbase.regionserver.BloomType;
 071import 
org.apache.hadoop.hbase.regionserver.HStore;
 072import 
org.apache.hadoop.hbase.regionserver.StoreFile;
 073import 

[28/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html 
b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
index 2def063..4e1f2ed 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
@@ -62,854 +62,860 @@
 054import 
org.apache.hadoop.hbase.client.RowMutations;
 055import 
org.apache.hadoop.hbase.client.Scan;
 056import 
org.apache.hadoop.hbase.client.Table;
-057import 
org.apache.hadoop.hbase.client.coprocessor.Batch;
-058import 
org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
-059import 
org.apache.hadoop.hbase.client.metrics.ScanMetrics;
-060import 
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-061import 
org.apache.hadoop.hbase.io.TimeRange;
-062import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-063import 
org.apache.hadoop.hbase.rest.Constants;
-064import 
org.apache.hadoop.hbase.rest.model.CellModel;
-065import 
org.apache.hadoop.hbase.rest.model.CellSetModel;
-066import 
org.apache.hadoop.hbase.rest.model.RowModel;
-067import 
org.apache.hadoop.hbase.rest.model.ScannerModel;
-068import 
org.apache.hadoop.hbase.rest.model.TableSchemaModel;
-069import 
org.apache.hadoop.hbase.util.Bytes;
-070import 
org.apache.hadoop.util.StringUtils;
-071
-072import com.google.protobuf.Descriptors;
-073import com.google.protobuf.Message;
-074import com.google.protobuf.Service;
-075import 
com.google.protobuf.ServiceException;
-076
-077/**
-078 * HTable interface to remote tables 
accessed via REST gateway
-079 */
-080@InterfaceAudience.Public
-081public class RemoteHTable implements 
Table {
-082
-083  private static final Log LOG = 
LogFactory.getLog(RemoteHTable.class);
-084
-085  final Client client;
-086  final Configuration conf;
-087  final byte[] name;
-088  final int maxRetries;
-089  final long sleepTime;
-090
-091  @SuppressWarnings("rawtypes")
-092  protected String buildRowSpec(final 
byte[] row, final Map familyMap,
-093  final long startTime, final long 
endTime, final int maxVersions) {
-094StringBuffer sb = new 
StringBuffer();
-095sb.append('/');
-096sb.append(Bytes.toString(name));
-097sb.append('/');
-098sb.append(toURLEncodedBytes(row));
-099Set families = 
familyMap.entrySet();
-100if (families != null) {
-101  Iterator i = 
familyMap.entrySet().iterator();
-102  sb.append('/');
-103  while (i.hasNext()) {
-104Map.Entry e = 
(Map.Entry)i.next();
-105Collection quals = 
(Collection)e.getValue();
-106if (quals == null || 
quals.isEmpty()) {
-107  // this is an unqualified 
family. append the family name and NO ':'
-108  
sb.append(toURLEncodedBytes((byte[])e.getKey()));
-109} else {
-110  Iterator ii = 
quals.iterator();
-111  while (ii.hasNext()) {
-112
sb.append(toURLEncodedBytes((byte[])e.getKey()));
-113sb.append(':');
-114Object o = ii.next();
-115// Puts use byte[] but 
Deletes use KeyValue
-116if (o instanceof byte[]) {
-117  
sb.append(toURLEncodedBytes((byte[])o));
-118} else if (o instanceof 
KeyValue) {
-119  
sb.append(toURLEncodedBytes(CellUtil.cloneQualifier((KeyValue)o)));
-120} else {
-121  throw new 
RuntimeException("object type not handled");
-122}
-123if (ii.hasNext()) {
-124  sb.append(',');
-125}
-126  }
-127}
-128if (i.hasNext()) {
-129  sb.append(',');
-130}
-131  }
-132}
-133if (startTime = 0  
endTime != Long.MAX_VALUE) {
-134  sb.append('/');
-135  sb.append(startTime);
-136  if (startTime != endTime) {
-137sb.append(',');
-138sb.append(endTime);
-139  }
-140} else if (endTime != Long.MAX_VALUE) 
{
-141  sb.append('/');
-142  sb.append(endTime);
-143}
-144if (maxVersions  1) {
-145  sb.append("?v=");
-146  sb.append(maxVersions);
-147}
-148return sb.toString();
-149  }
-150
-151  protected String 
buildMultiRowSpec(final byte[][] rows, int maxVersions) {
-152StringBuilder sb = new 
StringBuilder();
-153sb.append('/');
-154sb.append(Bytes.toString(name));
-155sb.append("/multiget/");
-156if (rows == null || rows.length == 0) 
{
-157  return sb.toString();
-158}
-159sb.append("?");
-160for(int i=0; irows.length; i++) 
{
-161  byte[] rk = rows[i];
-162  if (i != 0) {
-163sb.append('');
-164  }
-165  sb.append("row=");
-166  sb.append(toURLEncodedBytes(rk));
-167}
-168sb.append("v=");
-169sb.append(maxVersions);
-170
-171  

[48/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
index 84ddf7d..db3401c 100644
--- a/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html
@@ -590,7 +590,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 getValue(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey)
 Deprecated.
-Getter for accessing the metadata associated with the 
key
+Getter for accessing the metadata associated with the 
key.
 
 
 
@@ -1221,7 +1221,7 @@ implements 
 
 NAMESPACE_FAMILY_INFO
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String NAMESPACE_FAMILY_INFO
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String NAMESPACE_FAMILY_INFO
 Deprecated.
 
 See Also:
@@ -1235,7 +1235,7 @@ implements 
 
 NAMESPACE_FAMILY_INFO_BYTES
-public static finalbyte[] NAMESPACE_FAMILY_INFO_BYTES
+public static finalbyte[] NAMESPACE_FAMILY_INFO_BYTES
 Deprecated.
 
 
@@ -1245,7 +1245,7 @@ implements 
 
 NAMESPACE_COL_DESC_BYTES
-public static finalbyte[] NAMESPACE_COL_DESC_BYTES
+public static finalbyte[] NAMESPACE_COL_DESC_BYTES
 Deprecated.
 
 
@@ -1255,7 +1255,7 @@ implements 
 
 NAMESPACE_TABLEDESC
-public static finalHTableDescriptor NAMESPACE_TABLEDESC
+public static finalHTableDescriptor NAMESPACE_TABLEDESC
 Deprecated.
 Table descriptor for namespace table
 
@@ -1414,30 +1414,13 @@ implements 
-
-
-
-
-getValue
-publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetValue(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey)
-Deprecated.
-Getter for accessing the metadata associated with the 
key
-
-Parameters:
-key - The key.
-Returns:
-The value.
-
-
-
 
 
 
 
 
 getValues
-publichttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,BytesgetValues()
+publichttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,BytesgetValues()
 Deprecated.
 
 Specified by:
@@ -1453,7 +1436,7 @@ implements 
 
 setValue
-publicHTableDescriptorsetValue(byte[]key,
+publicHTableDescriptorsetValue(byte[]key,
  byte[]value)
 Deprecated.
 Setter for storing metadata as a (key, value) pair in 
map
@@ -1470,7 +1453,7 @@ implements 
 
 setValue
-publicHTableDescriptorsetValue(Byteskey,
+publicHTableDescriptorsetValue(Byteskey,
  Bytesvalue)
 Deprecated.
 
@@ -1481,7 +1464,7 @@ implements 
 
 setValue
-publicHTableDescriptorsetValue(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey,
+publicHTableDescriptorsetValue(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey,
  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringvalue)
 Deprecated.
 Setter for storing metadata as a (key, value) pair in 
map
@@ -1498,7 +1481,7 @@ implements 
 
 remove
-publicvoidremove(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey)
+publicvoidremove(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey)
 Deprecated.
 Remove metadata represented by the key from the map
 
@@ -1514,7 +1497,7 @@ implements 
 
 remove
-publicvoidremove(Byteskey)
+publicvoidremove(Byteskey)
 Deprecated.
 Remove metadata represented by the key from the map
 
@@ -1530,7 +1513,7 @@ implements 
 
 remove
-publicvoidremove(byte[]key)
+publicvoidremove(byte[]key)
 Deprecated.
 Remove metadata represented by the key from the map
 
@@ -1546,7 +1529,7 @@ implements 
 
 isReadOnly
-publicbooleanisReadOnly()
+publicbooleanisReadOnly()
 Deprecated.
 Check if the readOnly flag of the table is set. If the 
readOnly flag is
  set then the contents of the table can only be read from but not 
modified.
@@ -1564,7 +1547,7 @@ implements 
 
 setReadOnly
-publicHTableDescriptorsetReadOnly(booleanreadOnly)
+publicHTableDescriptorsetReadOnly(booleanreadOnly)
 Deprecated.
 

[40/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
index 0fba200..19ef87b 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HTableDescriptor.html
@@ -158,740 +158,734 @@
 150  }
 151
 152  /**
-153   * Getter for accessing the metadata 
associated with the key
-154   *
-155   * @param key The key.
-156   * @return The value.
-157   */
-158  public String getValue(String key) {
-159byte[] value = 
getValue(Bytes.toBytes(key));
-160return value == null ? null : 
Bytes.toString(value);
-161  }
-162
-163  /**
-164   * @return Getter for fetching an 
unmodifiable map.
+153   * @return Getter for fetching an 
unmodifiable map.
+154   */
+155  @Override
+156  public MapBytes, Bytes 
getValues() {
+157return delegatee.getValues();
+158  }
+159
+160  /**
+161   * Setter for storing metadata as a 
(key, value) pair in map
+162   *
+163   * @param key The key.
+164   * @param value The value. If null, 
removes the setting.
 165   */
-166  @Override
-167  public MapBytes, Bytes 
getValues() {
-168return delegatee.getValues();
+166  public HTableDescriptor setValue(byte[] 
key, byte[] value) {
+167
getDelegateeForModification().setValue(key, value);
+168return this;
 169  }
 170
-171  /**
+171  /*
 172   * Setter for storing metadata as a 
(key, value) pair in map
 173   *
 174   * @param key The key.
 175   * @param value The value. If null, 
removes the setting.
 176   */
-177  public HTableDescriptor setValue(byte[] 
key, byte[] value) {
+177  public HTableDescriptor setValue(final 
Bytes key, final Bytes value) {
 178
getDelegateeForModification().setValue(key, value);
 179return this;
 180  }
 181
-182  /*
+182  /**
 183   * Setter for storing metadata as a 
(key, value) pair in map
 184   *
 185   * @param key The key.
 186   * @param value The value. If null, 
removes the setting.
 187   */
-188  public HTableDescriptor setValue(final 
Bytes key, final Bytes value) {
-189
getDelegateeForModification().setValue(key, value);
+188  public HTableDescriptor setValue(String 
key, String value) {
+189
getDelegateeForModification().setValue(Bytes.toBytes(key), 
Bytes.toBytes(value));
 190return this;
 191  }
 192
 193  /**
-194   * Setter for storing metadata as a 
(key, value) pair in map
+194   * Remove metadata represented by the 
key from the map
 195   *
-196   * @param key The key.
-197   * @param value The value. If null, 
removes the setting.
+196   * @param key Key whose key and value 
we're to remove from HTableDescriptor
+197   * parameters.
 198   */
-199  public HTableDescriptor setValue(String 
key, String value) {
-200
getDelegateeForModification().setValue(Bytes.toBytes(key), 
Bytes.toBytes(value));
-201return this;
-202  }
-203
-204  /**
-205   * Remove metadata represented by the 
key from the map
-206   *
-207   * @param key Key whose key and value 
we're to remove from HTableDescriptor
-208   * parameters.
-209   */
-210  public void remove(final String key) 
{
-211
getDelegateeForModification().removeValue(Bytes.toBytes(key));
-212  }
-213
-214  /**
-215   * Remove metadata represented by the 
key from the map
-216   *
-217   * @param key Key whose key and value 
we're to remove from HTableDescriptor
-218   * parameters.
-219   */
-220  public void remove(Bytes key) {
-221
getDelegateeForModification().removeValue(key);
-222  }
-223
-224  /**
-225   * Remove metadata represented by the 
key from the map
+199  public void remove(final String key) 
{
+200
getDelegateeForModification().removeValue(Bytes.toBytes(key));
+201  }
+202
+203  /**
+204   * Remove metadata represented by the 
key from the map
+205   *
+206   * @param key Key whose key and value 
we're to remove from HTableDescriptor
+207   * parameters.
+208   */
+209  public void remove(Bytes key) {
+210
getDelegateeForModification().removeValue(key);
+211  }
+212
+213  /**
+214   * Remove metadata represented by the 
key from the map
+215   *
+216   * @param key Key whose key and value 
we're to remove from HTableDescriptor
+217   * parameters.
+218   */
+219  public void remove(final byte [] key) 
{
+220
getDelegateeForModification().removeValue(key);
+221  }
+222
+223  /**
+224   * Check if the readOnly flag of the 
table is set. If the readOnly flag is
+225   * set then the contents of the table 
can only be read from but not modified.
 226   *
-227   * @param key Key whose key and value 
we're to remove from HTableDescriptor
-228   * parameters.
-229   */
-230  public void remove(final byte [] key) 
{
-231
getDelegateeForModification().removeValue(key);
+227   * @return true if all columns in the 
table 

[50/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 6a526ed..1fe297c 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
 /Producer (Apache HBase Team)
-/CreationDate (D:20170707144826+00'00')
-/ModDate (D:20170707144826+00'00')
+/CreationDate (D:20170708144821+00'00')
+/ModDate (D:20170708144821+00'00')
 >>
 endobj
 2 0 obj
@@ -22,7 +22,7 @@ endobj
 3 0 obj
 << /Type /Pages
 /Count 679
-/Kids [7 0 R 13 0 R 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 39 0 R 43 0 R 47 0 R 55 
0 R 58 0 R 60 0 R 62 0 R 66 0 R 71 0 R 74 0 R 79 0 R 81 0 R 84 0 R 86 0 R 92 0 
R 101 0 R 106 0 R 108 0 R 120 0 R 123 0 R 130 0 R 136 0 R 145 0 R 154 0 R 166 0 
R 170 0 R 172 0 R 176 0 R 182 0 R 184 0 R 186 0 R 188 0 R 190 0 R 193 0 R 199 0 
R 201 0 R 204 0 R 206 0 R 208 0 R 210 0 R 212 0 R 214 0 R 217 0 R 220 0 R 225 0 
R 227 0 R 229 0 R 231 0 R 233 0 R 235 0 R 237 0 R 239 0 R 245 0 R 248 0 R 250 0 
R 252 0 R 254 0 R 259 0 R 264 0 R 269 0 R 272 0 R 275 0 R 291 0 R 300 0 R 306 0 
R 318 0 R 327 0 R 332 0 R 334 0 R 336 0 R 347 0 R 352 0 R 356 0 R 361 0 R 368 0 
R 379 0 R 381 0 R 393 0 R 407 0 R 416 0 R 418 0 R 420 0 R 426 0 R 437 0 R 448 0 
R 459 0 R 462 0 R 465 0 R 469 0 R 473 0 R 477 0 R 480 0 R 482 0 R 485 0 R 489 0 
R 491 0 R 495 0 R 500 0 R 504 0 R 510 0 R 512 0 R 518 0 R 520 0 R 524 0 R 533 0 
R 535 0 R 539 0 R 542 0 R 545 0 R 548 0 R 562 0 R 569 0 R 576 0 R 588 0 R 594 0 
R 602 0 R 611 0 R 614 0 R 618 0 R 621 0
  R 632 0 R 640 0 R 646 0 R 651 0 R 655 0 R 657 0 R 671 0 R 683 0 R 689 0 R 695 
0 R 698 0 R 707 0 R 715 0 R 719 0 R 724 0 R 729 0 R 731 0 R 733 0 R 735 0 R 743 
0 R 752 0 R 756 0 R 764 0 R 772 0 R 778 0 R 782 0 R 788 0 R 793 0 R 798 0 R 806 
0 R 808 0 R 812 0 R 817 0 R 823 0 R 826 0 R 833 0 R 843 0 R 847 0 R 849 0 R 852 
0 R 856 0 R 861 0 R 864 0 R 876 0 R 880 0 R 885 0 R 893 0 R 898 0 R 902 0 R 906 
0 R 908 0 R 911 0 R 913 0 R 917 0 R 919 0 R 923 0 R 927 0 R 931 0 R 936 0 R 941 
0 R 944 0 R 946 0 R 953 0 R 959 0 R 967 0 R 976 0 R 980 0 R 985 0 R 989 0 R 991 
0 R 1000 0 R 1003 0 R 1008 0 R 1011 0 R 1020 0 R 1023 0 R 1029 0 R 1036 0 R 
1039 0 R 1041 0 R 1050 0 R 1052 0 R 1054 0 R 1057 0 R 1059 0 R 1061 0 R 1063 0 
R 1065 0 R 1067 0 R 1071 0 R 1075 0 R 1080 0 R 1082 0 R 1084 0 R 1086 0 R 1088 
0 R 1093 0 R 1102 0 R 1105 0 R 1107 0 R 1109 0 R 1114 0 R 1116 0 R 1119 0 R 
1121 0 R 1123 0 R 1125 0 R 1128 0 R 1133 0 R 1138 0 R 1148 0 R 1153 0 R 1167 0 
R 1179 0 R 1183 0 R 1196 0 R 1205 0 R 1219 0 R 12
 23 0 R 1233 0 R 1246 0 R 1249 0 R 1261 0 R 1270 0 R 1277 0 R 1281 0 R 1291 0 R 
1296 0 R 1300 0 R 1306 0 R 1312 0 R 1319 0 R 1327 0 R 1329 0 R 1341 0 R 1343 0 
R 1348 0 R 1352 0 R 1357 0 R 1368 0 R 1374 0 R 1380 0 R 1382 0 R 1384 0 R 1396 
0 R 1402 0 R 1410 0 R 1415 0 R 1427 0 R 1434 0 R 1439 0 R 1449 0 R 1457 0 R 
1460 0 R 1466 0 R 1470 0 R 1473 0 R 1478 0 R 1481 0 R 1485 0 R 1491 0 R 1495 0 
R 1500 0 R 1506 0 R 1510 0 R 1513 0 R 1515 0 R 1523 0 R 1531 0 R 1537 0 R 1542 
0 R 1546 0 R 1549 0 R 1555 0 R 1561 0 R 1566 0 R 1568 0 R 1570 0 R 1573 0 R 
1575 0 R 1583 0 R 1586 0 R 1592 0 R 1600 0 R 1604 0 R 1609 0 R 1615 0 R 1618 0 
R 1620 0 R 1622 0 R 1624 0 R 1631 0 R 1641 0 R 1643 0 R 1645 0 R 1647 0 R 1649 
0 R 1652 0 R 1654 0 R 1656 0 R 1658 0 R 1661 0 R 1663 0 R 1665 0 R 1667 0 R 
1671 0 R 1675 0 R 1684 0 R 1686 0 R 1688 0 R 1690 0 R 1692 0 R 1699 0 R 1701 0 
R 1706 0 R 1708 0 R 1710 0 R 1717 0 R 1722 0 R 1728 0 R 1732 0 R 1735 0 R 1738 
0 R 1742 0 R 1744 0 R 1747 0 R 1749 0 R 1751 0 R 1753 0 R 
 1757 0 R 1759 0 R 1762 0 R 1764 0 R 1766 0 R 1768 0 R 1770 0 R 1778 0 R 1781 0 
R 1786 0 R 1788 0 R 1790 0 R 1792 0 R 1794 0 R 1802 0 R 1813 0 R 1816 0 R 1830 
0 R 1842 0 R 1846 0 R 1852 0 R 1857 0 R 1860 0 R 1865 0 R 1867 0 R 1872 0 R 
1874 0 R 1877 0 R 1879 0 R 1881 0 R 1883 0 R 1885 0 R 1889 0 R 1891 0 R 1895 0 
R 1899 0 R 1906 0 R 1913 0 R 1924 0 R 1938 0 R 1950 0 R 1967 0 R 1971 0 R 1973 
0 R 1977 0 R 1994 0 R 2002 0 R 2009 0 R 2018 0 R 2024 0 R 2034 0 R 2045 0 R 
2051 0 R 2060 0 R 2072 0 R 2089 0 R 2100 0 R 2103 0 R 2112 0 R 2127 0 R 2134 0 
R 2137 0 R 2142 0 R 2147 0 R 2157 0 R 2165 0 R 2168 0 R 2170 0 R 2174 0 R 2189 
0 R 2198 0 R 2203 0 R 2207 0 R 2210 0 R 2212 0 R 2214 0 R 2216 0 R 2218 0 R 
2223 0 R 2225 0 R 2235 0 R 2245 0 R 2252 0 R 2264 0 R 2269 0 R 2273 0 R 2286 0 
R 2293 0 R 2299 0 R 2301 0 R 2311 0 R 2318 0 R 2329 0 R 2333 0 R 2344 0 R 2350 
0 R 2360 0 R 2369 0 R 2377 0 R 2383 0 R 2388 0 R 2392 0 R 2396 0 R 2398 0 R 
2404 0 R 2408 0 R 2412 0 R 2418 0 R 2425 0 R 2430 0 R 2434 0 
 R 2443 0 R 2448 0 R 2453 0 R 2466 0 R 2473 0 R 2476 0 R 2482 0 R 2488 0 R 2492 
0 R 2496 0 R 2504 0 R 2510 0 R 2512 0 R 2518 0 R 2523 0 R 2526 0 R 2536 0 R 
2542 0 R 2551 0 R 2555 0 R 2564 0 R 2569 0 R 2572 0 R 2582 0 

[18/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 369cb85..021cf88 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -2986,10 +2986,25 @@ service.
 
 
 
+(package private) static HTableDescriptor
+HBaseAdmin.getHTableDescriptor(TableNametableName,
+   Connectionconnection,
+   RpcRetryingCallerFactoryrpcCallerFactory,
+   RpcControllerFactoryrpcControllerFactory,
+   intoperationTimeout,
+   intrpcTimeout)
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use HBaseAdmin.getTableDescriptor(TableName,
+ Connection, 
RpcRetryingCallerFactory,RpcControllerFactory,int,int)
+
+
+
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">Optionalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
 AsyncHBaseAdmin.getLastMajorCompactionTimestamp(TableNametableName)
 
-
+
 long
 Admin.getLastMajorCompactionTimestamp(TableNametableName)
 Get the timestamp of the last major compaction for the 
passed table
@@ -2998,74 +3013,74 @@ service.
  or 0 if no such HFile could be found.
 
 
-
+
 long
 HBaseAdmin.getLastMajorCompactionTimestamp(TableNametableName)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">Optionalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
 AsyncAdmin.getLastMajorCompactionTimestamp(TableNametableName)
 Get the timestamp of the last major compaction for the 
passed table.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">Optionalhttp://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
 RawAsyncHBaseAdmin.getLastMajorCompactionTimestamp(TableNametableName)
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/function/Consumer.html?is-external=true;
 title="class or interface in java.util.function">Consumerhttp://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
 AsyncProcess.getLogger(TableNametableName,
  longmax)
 
-
+
 private HRegionInfo
 HBaseAdmin.getMobRegionInfo(TableNametableName)
 
-
+
 int
 MetaCache.getNumberOfCachedRegionLocations(TableNametableName)
 Return the number of cached region for a table.
 
 
-
+
 (package private) int
 ConnectionImplementation.getNumberOfCachedRegionLocations(TableNametableName)
 
-
+
 default RawAsyncTable
 AsyncConnection.getRawTable(TableNametableName)
 Retrieve an RawAsyncTable implementation 
for accessing a table.
 
 
-
+
 AsyncTableBuilderRawAsyncTable
 AsyncConnection.getRawTableBuilder(TableNametableName)
 Returns an AsyncTableBuilder for creating 
RawAsyncTable.
 
 
-
+
 AsyncTableBuilderRawAsyncTable
 AsyncConnectionImpl.getRawTableBuilder(TableNametableName)
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],RegionLoad
 Admin.getRegionLoad(ServerNamesn,
  TableNametableName)
 Get RegionLoad of all regions hosted on a 
regionserver for a table.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],RegionLoad
 HBaseAdmin.getRegionLoad(ServerNamesn,
  TableNametableName)
 
-
+
 HRegionLocation
 ConnectionImplementation.getRegionLocation(TableNametableName,
  byte[]row,
  booleanreload)
 
-
+
 HRegionLocation
 ClusterConnection.getRegionLocation(TableNametableName,
  byte[]row,
@@ -3073,26 +3088,26 @@ service.
 Find region location hosting passed row
 
 
-
+
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 

hbase-site git commit: INFRA-10751 Empty commit

2017-07-08 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 9fb0764ba -> dc577176c


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/dc577176
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/dc577176
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/dc577176

Branch: refs/heads/asf-site
Commit: dc577176c42b3546c7631e3659143a955c8fa3f5
Parents: 9fb0764
Author: jenkins 
Authored: Sat Jul 8 15:02:12 2017 +
Committer: jenkins 
Committed: Sat Jul 8 15:02:12 2017 +

--

--




[01/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 225052054 -> 9fb0764ba


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/io/crypto/class-use/Encryption.Context.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/crypto/class-use/Encryption.Context.html
 
b/devapidocs/org/apache/hadoop/hbase/io/crypto/class-use/Encryption.Context.html
index 9842663..4c3213b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/crypto/class-use/Encryption.Context.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/crypto/class-use/Encryption.Context.html
@@ -332,33 +332,33 @@
 
 
 static StoreFileWriter
-MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf,
+MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf,
 org.apache.hadoop.fs.FileSystemfs,
-HColumnDescriptorfamily,
-MobFileNamemobFileName,
-org.apache.hadoop.fs.PathbasePath,
+ColumnFamilyDescriptorfamily,
+org.apache.hadoop.fs.Pathpath,
 longmaxKeyCount,
 Compression.Algorithmcompression,
 CacheConfigcacheConfig,
 Encryption.ContextcryptoContext,
+ChecksumTypechecksumType,
+intbytesPerChecksum,
+intblocksize,
+BloomTypebloomType,
 booleanisCompaction)
 Creates a writer for the mob file in temp directory.
 
 
 
 static StoreFileWriter
-MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf,
+MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf,
 org.apache.hadoop.fs.FileSystemfs,
 HColumnDescriptorfamily,
-org.apache.hadoop.fs.Pathpath,
+MobFileNamemobFileName,
+org.apache.hadoop.fs.PathbasePath,
 longmaxKeyCount,
 Compression.Algorithmcompression,
 CacheConfigcacheConfig,
 Encryption.ContextcryptoContext,
-ChecksumTypechecksumType,
-intbytesPerChecksum,
-intblocksize,
-BloomTypebloomType,
 booleanisCompaction)
 Creates a writer for the mob file in temp directory.
 
@@ -463,8 +463,8 @@
 
 
 static Encryption.Context
-EncryptionUtil.createEncryptionContext(org.apache.hadoop.conf.Configurationconf,
-   HColumnDescriptorfamily)
+EncryptionUtil.createEncryptionContext(org.apache.hadoop.conf.Configurationconf,
+   ColumnFamilyDescriptorfamily)
 Helper to create an encyption context.
 
 



[08/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
index 427d86f..68cf5fb 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer.html
@@ -126,7 +126,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private abstract class RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer
+private abstract class RawAsyncHBaseAdmin.NamespaceProcedureBiConsumer
 extends RawAsyncHBaseAdmin.ProcedureBiConsumer
 
 
@@ -248,7 +248,7 @@ extends 
 
 namespaceName
-protected finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String namespaceName
+protected finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String namespaceName
 
 
 
@@ -265,7 +265,7 @@ extends 
 
 NamespaceProcedureBiConsumer
-NamespaceProcedureBiConsumer(AsyncAdminadmin,
+NamespaceProcedureBiConsumer(AsyncAdminadmin,
  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringnamespaceName)
 
 
@@ -283,7 +283,7 @@ extends 
 
 getOperationType
-abstracthttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetOperationType()
+abstracthttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetOperationType()
 
 
 
@@ -292,7 +292,7 @@ extends 
 
 getDescription
-http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetDescription()
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetDescription()
 
 
 
@@ -301,7 +301,7 @@ extends 
 
 onFinished
-voidonFinished()
+voidonFinished()
 
 Specified by:
 onFinishedin
 classRawAsyncHBaseAdmin.ProcedureBiConsumer
@@ -314,7 +314,7 @@ extends 
 
 onError
-voidonError(http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwableerror)
+voidonError(http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwableerror)
 
 Specified by:
 onErrorin
 classRawAsyncHBaseAdmin.ProcedureBiConsumer

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
index 2a58b36..e1a7afe 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.ProcedureBiConsumer.html
@@ -121,7 +121,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private abstract class RawAsyncHBaseAdmin.ProcedureBiConsumer
+private abstract class RawAsyncHBaseAdmin.ProcedureBiConsumer
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/util/function/BiConsumer.html?is-external=true;
 title="class or interface in java.util.function">BiConsumerhttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void,http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwable
 
@@ -226,7 +226,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/function/
 
 
 admin
-protected finalAsyncAdmin admin
+protected finalAsyncAdmin admin
 
 
 
@@ -243,7 +243,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/function/
 
 
 ProcedureBiConsumer
-ProcedureBiConsumer(AsyncAdminadmin)
+ProcedureBiConsumer(AsyncAdminadmin)
 
 
 
@@ -260,7 +260,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/function/
 
 
 onFinished
-abstractvoidonFinished()
+abstractvoidonFinished()
 
 
 
@@ -269,7 +269,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/function/
 
 
 onError

[41/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html 
b/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
index e8ddeb4..c931bd9 100644
--- a/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
+++ b/apidocs/org/apache/hadoop/hbase/rest/client/RemoteHTable.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":42,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":42,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":42,"i48":10,"i49":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":42,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":42,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":42,"i49":10,"i50":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class RemoteHTable
+public class RemoteHTable
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements Table
 HTable interface to remote tables accessed via REST 
gateway
@@ -385,78 +385,84 @@ implements 
 
 
+TableDescriptor
+getDescriptor()
+Gets the table 
descriptor for this table.
+
+
+
 TableName
 getName()
 Gets the fully qualified table name instance of this 
table.
 
 
-
+
 int
 getOperationTimeout()
 Get timeout (millisecond) of each operation for in Table 
instance.
 
 
-
+
 int
 getReadRpcTimeout()
 Get timeout (millisecond) of each rpc read request in this 
Table instance.
 
 
-
+
 int
 getRpcTimeout()
 Deprecated.
 
 
-
+
 ResultScanner
 getScanner(byte[]family)
 Gets a scanner on the current table for the given 
family.
 
 
-
+
 ResultScanner
 getScanner(byte[]family,
   byte[]qualifier)
 Gets a scanner on the current table for the given family 
and qualifier.
 
 
-
+
 ResultScanner
 getScanner(Scanscan)
 Returns a scanner on the current table as specified by the 
Scan
  object.
 
 
-
+
 HTableDescriptor
 getTableDescriptor()
 Gets the table descriptor for 
this table.
 
 
-
+
 byte[]
 getTableName()
 
-
+
 long
 getWriteBufferSize()
 Returns the maximum size in bytes of the write buffer for 
this HTable.
 
 
-
+
 int
 getWriteRpcTimeout()
 Get timeout (millisecond) of each rpc write request in this 
Table instance.
 
 
-
+
 Result
 increment(Incrementincrement)
 Increments one or more columns within a single row.
 
 
-
+
 long
 incrementColumnValue(byte[]row,
 byte[]family,
@@ -465,7 +471,7 @@ implements See Table.incrementColumnValue(byte[],
 byte[], byte[], long, Durability)
 
 
-
+
 long
 incrementColumnValue(byte[]row,
 byte[]family,
@@ -475,55 +481,55 @@ implements Atomically increments a column value.
 
 
-
+
 boolean
 isAutoFlush()
 
-
+
 void
 mutateRow(RowMutationsrm)
 Performs multiple mutations atomically on a single 
row.
 
 
-
+
 void
 put(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListPutputs)
 Puts some data in the table, in batch.
 
 
-
+
 void
 put(Putput)
 Puts some data in the table.
 
 
-
+
 void
 setOperationTimeout(intoperationTimeout)
 Set timeout (millisecond) of each operation in this Table 
instance, will override the value
  of hbase.client.operation.timeout in configuration.
 
 
-
+
 void
 setReadRpcTimeout(intreadRpcTimeout)
 Set timeout (millisecond) of each rpc read request in 
operations of this Table instance, will
  override the value of hbase.rpc.read.timeout in configuration.
 
 
-
+
 void
 setRpcTimeout(intrpcTimeout)
 Deprecated.
 
 
-
+
 void
 setWriteBufferSize(longwriteBufferSize)
 Sets the size of the buffer in bytes.
 
 
-
+
 void
 setWriteRpcTimeout(intwriteRpcTimeout)
 Set timeout (millisecond) of each rpc write request in 
operations of this Table instance, will
@@ -558,7 +564,7 @@ implements 
 
 RemoteHTable
-publicRemoteHTable(Clientclient,
+publicRemoteHTable(Clientclient,
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or 

[45/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
index 9d51767..3dd4c78 100644
--- a/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":18,"i10":6,"i11":18,"i12":6,"i13":6,"i14":6,"i15":18,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":18,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":18,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":18,"i43":6,"i44":6,"i45":6,"i46":18,"i47":6,"i48":18,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":18,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":18,"i64":6,"i65":18,"i66":6,"i67":18,"i68":6,"i69":18,"i70":6,"i71":6,"i72":18,"i73":6,"i74":18,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":18,"i90":18,"i91":6,"i92":6,"i93":18,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":18,"i10":6,"i11":18,"i12":6,"i13":6,"i14":6,"i15":18,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":18,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":18,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":18,"i43":6,"i44":6,"i45":6,"i46":18,"i47":6,"i48":18,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":18,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":18,"i67":6,"i68":18,"i69":6,"i70":18,"i71":6,"i72":18,"i73":6,"i74":6,"i75":18,"i76":6,"i77":18,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":18,"i99":18,"i100":6,"i101":6,"i102":18,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -456,11 +456,29 @@ public interface 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+isCatalogJanitorOn()
+Query on the catalog janitor state.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+isCleanerChoreOn()
+Query the current state of the cleaner chore.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
 isMasterInMaintenanceMode()
 Check whether master is in maintenance mode
 
 
-
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+isNormalizerOn()
+Query the current state of the region normalizer
+
+
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
 isProcedureFinished(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringsignature,
http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringinstance,
@@ -468,17 +486,17 @@ public interface Check the current state of the specified procedure.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
 

[05/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
 
b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
index a5aadc9..05a7362 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/TableDescriptorBuilder.ModifyableTableDescriptor.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":9,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":42,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":9,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":42,"i56":42,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":9,"i67":10,"i68":10,"i69":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":9,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":42,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":9,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":42,"i57":42,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":9,"i68":10,"i69":10,"i70":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -398,243 +398,249 @@ implements 
+http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+getValue(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey)
+Getter for accessing the metadata associated with the 
key.
+
+
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,Bytes
 getValues()
 Getter for fetching an unmodifiable values
 map.
 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/lang/StringBuilder.html?is-external=true;
 title="class or interface in java.lang">StringBuilder
 getValues(booleanprintDefaults)
 
-
+
 boolean
 hasColumnFamily(byte[]familyName)
 Checks to see if this table contains the given column 
family
 
 
-
+
 boolean
 hasCoprocessor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringclassNameToMatch)
 Check if the table has an attached co-processor represented 
by the name
  className
 
 
-
+
 int
 hashCode()
 
-
+
 boolean
 hasRegionMemstoreReplication()
 
-
+
 boolean
 hasSerialReplicationScope()
 Return true if there are at least one cf whose replication 
scope is
  serial.
 
 
-
+
 boolean
 isCompactionEnabled()
 Check if the compaction enable flag of the table is 
true.
 
 
-
+
 boolean
 isMetaRegion()
 Checks if this table is  hbase:meta  
region.
 
 
-
+
 boolean
 isMetaTable()
 Checks if the table is a hbase:meta table
 
 
-
+
 boolean
 isNormalizationEnabled()
 Check if normalization enable flag of the table is 
true.
 
 
-
+
 boolean
 isReadOnly()
 Check if the readOnly flag of the table is set.
 
 
-
+
 TableDescriptorBuilder.ModifyableTableDescriptor
 modifyColumnFamily(ColumnFamilyDescriptorfamily)
 Modifies the existing column family.
 
 
-
+
 private static TableDescriptor
 parseFrom(byte[]bytes)
 
-
+
 private TableDescriptorBuilder.ModifyableTableDescriptor
 putColumnFamily(ColumnFamilyDescriptorfamily)
 
-
+
 ColumnFamilyDescriptor
 removeColumnFamily(byte[]column)
 Removes the ColumnFamilyDescriptor with name specified by 
the parameter column
  from the table descriptor
 
 
-
+
 TableDescriptorBuilder.ModifyableTableDescriptor
 removeConfiguration(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey)
 Remove a config setting represented by the key from the
  configuration
 map
 
 
-
+
 void
 removeCoprocessor(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringclassName)
 Remove a coprocessor from those set on 

[49/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apache_hbase_reference_guide.pdfmarks
--
diff --git a/apache_hbase_reference_guide.pdfmarks 
b/apache_hbase_reference_guide.pdfmarks
index 0a96560..eccc78e 100644
--- a/apache_hbase_reference_guide.pdfmarks
+++ b/apache_hbase_reference_guide.pdfmarks
@@ -2,8 +2,8 @@
   /Author (Apache HBase Team)
   /Subject ()
   /Keywords ()
-  /ModDate (D:20170707144950)
-  /CreationDate (D:20170707144950)
+  /ModDate (D:20170708144945)
+  /CreationDate (D:20170708144945)
   /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
   /Producer ()
   /DOCINFO pdfmark

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/deprecated-list.html
--
diff --git a/apidocs/deprecated-list.html b/apidocs/deprecated-list.html
index 8c09782..61c560e 100644
--- a/apidocs/deprecated-list.html
+++ b/apidocs/deprecated-list.html
@@ -252,6 +252,30 @@
 org.apache.hadoop.hbase.mapreduce.CellCreator.create(byte[],
 int, int, byte[], int, int, byte[], int, int, long, byte[], int, int, 
String)
 
 
+org.apache.hadoop.hbase.client.Admin.createTable(HTableDescriptor)
+since 2.0 version and will 
be removed in 3.0 version.
+ use Admin.createTable(TableDescriptor)
+
+
+
+org.apache.hadoop.hbase.client.Admin.createTable(HTableDescriptor,
 byte[][])
+since 2.0 version and will 
be removed in 3.0 version.
+ use Admin.createTable(TableDescriptor,
 byte[][])
+
+
+
+org.apache.hadoop.hbase.client.Admin.createTable(HTableDescriptor,
 byte[], byte[], int)
+since 2.0 version and will 
be removed in 3.0 version.
+ use Admin.createTable(TableDescriptor,
 byte[], byte[], int)
+
+
+
+org.apache.hadoop.hbase.client.Admin.createTableAsync(HTableDescriptor,
 byte[][])
+since 2.0 version and will 
be removed in 3.0 version.
+ use Admin.createTableAsync(TableDescriptor,
 byte[][])
+
+
+
 org.apache.hadoop.hbase.client.Admin.deleteColumn(TableName,
 byte[])
 As of release 2.0.0.
  (https://issues.apache.org/jira/browse/HBASE-1989;>HBASE-1989).
@@ -260,6 +284,22 @@
 
 
 
+org.apache.hadoop.hbase.client.Admin.deleteTables(Pattern)
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.util.regex.Pattern)
+ and Admin.enableTable(org.apache.hadoop.hbase.TableName)
+
+
+
+org.apache.hadoop.hbase.client.Admin.deleteTables(String)
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.lang.String)
+ and Admin.enableTable(org.apache.hadoop.hbase.TableName)
+
+
+
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.disablePeer(String)
 use Admin.disableReplicationPeer(String)
  instead
@@ -272,6 +312,22 @@
 
 
 
+org.apache.hadoop.hbase.client.Admin.disableTables(Pattern)
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.util.regex.Pattern)
+ and Admin.disableTable(org.apache.hadoop.hbase.TableName)
+
+
+
+org.apache.hadoop.hbase.client.Admin.disableTables(String)
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.lang.String)
+ and Admin.disableTable(org.apache.hadoop.hbase.TableName)
+
+
+
 org.apache.hadoop.hbase.client.replication.ReplicationAdmin.enablePeer(String)
 use Admin.enableReplicationPeer(String)
  instead
@@ -284,6 +340,22 @@
 
 
 
+org.apache.hadoop.hbase.client.Admin.enableTables(Pattern)
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.util.regex.Pattern)
+ and Admin.enableTable(org.apache.hadoop.hbase.TableName)
+
+
+
+org.apache.hadoop.hbase.client.Admin.enableTables(String)
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.lang.String)
+ and Admin.enableTable(org.apache.hadoop.hbase.TableName)
+
+
+
 org.apache.hadoop.hbase.rest.client.RemoteHTable.exists(ListGet)
 
 
@@ -428,6 +500,30 @@
 
 
 
+org.apache.hadoop.hbase.client.Table.getTableDescriptor()
+since 2.0 version and will 
be removed in 3.0 version.
+ use Table.getDescriptor()
+
+
+
+org.apache.hadoop.hbase.client.Admin.getTableDescriptor(TableName)
+since 2.0 version and will 
be removed in 3.0 version.
+  

[37/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/client/Table.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Table.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Table.html
index 2ff095f..515b2b7 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Table.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Table.html
@@ -73,600 +73,609 @@
 065  /**
 066   * Gets the {@link 
org.apache.hadoop.hbase.HTableDescriptor table descriptor} for this table.
 067   * @throws java.io.IOException if a 
remote or network exception occurs.
-068   */
-069  HTableDescriptor getTableDescriptor() 
throws IOException;
-070
-071  /**
-072   * Test for the existence of columns in 
the table, as specified by the Get.
-073   * p
-074   *
-075   * This will return true if the Get 
matches one or more keys, false if not.
-076   * p
-077   *
-078   * This is a server-side call so it 
prevents any data from being transfered to
-079   * the client.
-080   *
-081   * @param get the Get
-082   * @return true if the specified Get 
matches one or more keys, false if not
-083   * @throws IOException e
-084   */
-085  boolean exists(Get get) throws 
IOException;
-086
-087  /**
-088   * Test for the existence of columns in 
the table, as specified by the Gets.
-089   * p
-090   *
-091   * This will return an array of 
booleans. Each value will be true if the related Get matches
-092   * one or more keys, false if not.
-093   * p
-094   *
-095   * This is a server-side call so it 
prevents any data from being transferred to
-096   * the client.
-097   *
-098   * @param gets the Gets
-099   * @return Array of boolean.  True if 
the specified Get matches one or more keys, false if not.
-100   * @throws IOException e
-101   */
-102  boolean[] existsAll(ListGet 
gets) throws IOException;
-103
-104  /**
-105   * Method that does a batch call on 
Deletes, Gets, Puts, Increments and Appends.
-106   * The ordering of execution of the 
actions is not defined. Meaning if you do a Put and a
-107   * Get in the same {@link #batch} call, 
you will not necessarily be
-108   * guaranteed that the Get returns what 
the Put had put.
-109   *
-110   * @param actions list of Get, Put, 
Delete, Increment, Append objects
-111   * @param results Empty Object[], same 
size as actions. Provides access to partial
-112   *results, in case an 
exception is thrown. A null in the result array means that
-113   *the call for that 
action failed, even after retries. The order of the objects
-114   *in the results array 
corresponds to the order of actions in the request list.
-115   * @throws IOException
-116   * @since 0.90.0
-117   */
-118  void batch(final List? extends 
Row actions, final Object[] results) throws IOException,
-119InterruptedException;
-120
-121  /**
-122   * Same as {@link #batch(List, 
Object[])}, but with a callback.
-123   * @since 0.96.0
-124   */
-125  R void batchCallback(
-126final List? extends Row 
actions, final Object[] results, final Batch.CallbackR callback
-127  )
-128throws IOException, 
InterruptedException;
+068   * @deprecated since 2.0 version and 
will be removed in 3.0 version.
+069   * use {@link 
#getDescriptor()}
+070   */
+071  @Deprecated
+072  HTableDescriptor getTableDescriptor() 
throws IOException;
+073
+074  /**
+075   * Gets the {@link 
org.apache.hadoop.hbase.client.TableDescriptor table descriptor} for this 
table.
+076   * @throws java.io.IOException if a 
remote or network exception occurs.
+077   */
+078  TableDescriptor getDescriptor() throws 
IOException;
+079
+080  /**
+081   * Test for the existence of columns in 
the table, as specified by the Get.
+082   * p
+083   *
+084   * This will return true if the Get 
matches one or more keys, false if not.
+085   * p
+086   *
+087   * This is a server-side call so it 
prevents any data from being transfered to
+088   * the client.
+089   *
+090   * @param get the Get
+091   * @return true if the specified Get 
matches one or more keys, false if not
+092   * @throws IOException e
+093   */
+094  boolean exists(Get get) throws 
IOException;
+095
+096  /**
+097   * Test for the existence of columns in 
the table, as specified by the Gets.
+098   * p
+099   *
+100   * This will return an array of 
booleans. Each value will be true if the related Get matches
+101   * one or more keys, false if not.
+102   * p
+103   *
+104   * This is a server-side call so it 
prevents any data from being transferred to
+105   * the client.
+106   *
+107   * @param gets the Gets
+108   * @return Array of boolean.  True if 
the specified Get matches one or more keys, false if not.
+109   * @throws IOException e
+110   */
+111  boolean[] existsAll(ListGet 
gets) throws IOException;
+112
+113  /**
+114   * Method that does a batch call on 
Deletes, Gets, 

[47/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
index 825d6ec..a48bdf2 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
@@ -93,13 +93,6 @@
 
 
 
-org.apache.hadoop.hbase.mapreduce
-
-Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce;>MapReduce
-Input/OutputFormats, a table indexing MapReduce job, and utility methods.
-
-
-
 org.apache.hadoop.hbase.rest.client
 
 
@@ -379,99 +372,159 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 HTableDescriptor[]
 Admin.deleteTables(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
-Delete tables matching the passed in pattern and wait on 
completion.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.util.regex.Pattern)
+ and Admin.enableTable(org.apache.hadoop.hbase.TableName)
+
 
 
 
 HTableDescriptor[]
 Admin.deleteTables(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex)
-Deletes tables matching the passed in pattern and wait on 
completion.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.lang.String)
+ and Admin.enableTable(org.apache.hadoop.hbase.TableName)
+
 
 
 
 HTableDescriptor[]
 Admin.disableTables(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
-Disable tables matching the passed in pattern and wait on 
completion.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.util.regex.Pattern)
+ and Admin.disableTable(org.apache.hadoop.hbase.TableName)
+
 
 
 
 HTableDescriptor[]
 Admin.disableTables(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex)
-Disable tables matching the passed in pattern and wait on 
completion.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.lang.String)
+ and Admin.disableTable(org.apache.hadoop.hbase.TableName)
+
 
 
 
 HTableDescriptor[]
 Admin.enableTables(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
-Enable tables matching the passed in pattern and wait on 
completion.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.util.regex.Pattern)
+ and Admin.enableTable(org.apache.hadoop.hbase.TableName)
+
 
 
 
 HTableDescriptor[]
 Admin.enableTables(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex)
-Enable tables matching the passed in pattern and wait on 
completion.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.lang.String)
+ and Admin.enableTable(org.apache.hadoop.hbase.TableName)
+
 
 
 
 HTableDescriptor
 Table.getTableDescriptor()
-Gets the table descriptor for 
this table.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use Table.getDescriptor()
+
 
 
 
 HTableDescriptor
 Admin.getTableDescriptor(TableNametableName)
-Method for getting the tableDescriptor
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use Admin.listTableDescriptor(TableName)
+
 
 
 
 HTableDescriptor[]
 Admin.getTableDescriptors(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringnames)
-Get tableDescriptors
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 

[25/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 945d1bd..a2e2cbd 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
 2007 - 2017 The Apache Software Foundation
 
   File: 2242,
- Errors: 14711,
+ Errors: 14778,
  Warnings: 0,
  Infos: 0
   
@@ -279,7 +279,7 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
@@ -1007,7 +1007,7 @@ under the License.
   0
 
 
-  52
+  54
 
   
   
@@ -1231,7 +1231,7 @@ under the License.
   0
 
 
-  7
+  8
 
   
   
@@ -1273,7 +1273,7 @@ under the License.
   0
 
 
-  2
+  3
 
   
   
@@ -1329,7 +1329,7 @@ under the License.
   0
 
 
-  4
+  5
 
   
   
@@ -5627,7 +5627,7 @@ under the License.
   0
 
 
-  185
+  186
 
   
   
@@ -6551,7 +6551,7 @@ under the License.
   0
 
 
-  9
+  11
 
   
   
@@ -6775,7 +6775,7 @@ under the License.
   0
 
 
-  1
+  2
 
   
   
@@ -8035,7 +8035,7 @@ under the License.
   0
 
 
-  9
+  10
 
   
   
@@ -11073,7 +11073,7 @@ under the License.
   0
 
 
-  2
+  3
 
   
   
@@ -11423,7 +11423,7 @@ under the License.
   0
 
 
-  20
+  24
 
   
   
@@ -12207,7 +12207,7 @@ under the License.
   0
 
 
-  8
+  9
 
   
   
@@ -16449,7 +16449,7 @@ under the License.
   0
 
 
-  34
+  35
 
   
   
@@ -17933,7 +17933,7 @@ under the License.
   0
 
 
-  4
+  5
 
   
   
@@ -19935,7 +19935,7 @@ under the License.
   0
 
 
-  12
+  15
 
   
   
@@ -20957,7 +20957,7 @@ under the License.
   0
 
 
-  5
+  6
 
   
   
@@ -20999,7 +20999,7 @@ under the License.
   0
 
 
-  8
+  9
 
   
   
@@ -21461,7 +21461,7 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
@@ -21685,7 +21685,7 @@ under the License.
   0
 
 
-  17
+  18
 
   
   
@@ -24401,7 +24401,7 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
@@ -25213,7 +25213,7 @@ under the License.
   0
 
 
-  14
+  15
 
   
   
@@ -26487,7 +26487,7 @@ under the License.
   0
 
 
-  11
+  12
 
   
   
@@ -27145,7 

[19/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index fdb7533..dc3f671 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -5353,82 +5353,82 @@
 
 
 void
-Canary.Sink.publishReadFailure(ServerNameserverName,
+Canary.Sink.publishReadFailure(ServerNameserverName,
   HRegionInforegion,
+  ColumnFamilyDescriptorcolumn,
   http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exceptione)
 
 
 void
-Canary.StdOutSink.publishReadFailure(ServerNameserverName,
+Canary.StdOutSink.publishReadFailure(ServerNameserverName,
   HRegionInforegion,
+  ColumnFamilyDescriptorcolumn,
   http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exceptione)
 
 
 void
-Canary.Sink.publishReadFailure(ServerNameserverName,
+Canary.Sink.publishReadFailure(ServerNameserverName,
   HRegionInforegion,
-  HColumnDescriptorcolumn,
   http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exceptione)
 
 
 void
-Canary.StdOutSink.publishReadFailure(ServerNameserverName,
+Canary.StdOutSink.publishReadFailure(ServerNameserverName,
   HRegionInforegion,
-  HColumnDescriptorcolumn,
   http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exceptione)
 
 
 void
-Canary.Sink.publishReadTiming(ServerNameserverName,
+Canary.Sink.publishReadTiming(ServerNameserverName,
  HRegionInforegion,
- HColumnDescriptorcolumn,
+ ColumnFamilyDescriptorcolumn,
  longmsTime)
 
 
 void
-Canary.StdOutSink.publishReadTiming(ServerNameserverName,
+Canary.StdOutSink.publishReadTiming(ServerNameserverName,
  HRegionInforegion,
- HColumnDescriptorcolumn,
+ ColumnFamilyDescriptorcolumn,
  longmsTime)
 
 
 void
-Canary.Sink.publishWriteFailure(ServerNameserverName,
+Canary.Sink.publishWriteFailure(ServerNameserverName,
HRegionInforegion,
+   ColumnFamilyDescriptorcolumn,
http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exceptione)
 
 
 void
-Canary.StdOutSink.publishWriteFailure(ServerNameserverName,
+Canary.StdOutSink.publishWriteFailure(ServerNameserverName,
HRegionInforegion,
+   ColumnFamilyDescriptorcolumn,
http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exceptione)
 
 
 void
-Canary.Sink.publishWriteFailure(ServerNameserverName,
+Canary.Sink.publishWriteFailure(ServerNameserverName,
HRegionInforegion,
-   HColumnDescriptorcolumn,
http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exceptione)
 
 
 void
-Canary.StdOutSink.publishWriteFailure(ServerNameserverName,
+Canary.StdOutSink.publishWriteFailure(ServerNameserverName,
HRegionInforegion,
-   HColumnDescriptorcolumn,
http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exceptione)
 
 
 void
-Canary.Sink.publishWriteTiming(ServerNameserverName,
+Canary.Sink.publishWriteTiming(ServerNameserverName,
   HRegionInforegion,
-  HColumnDescriptorcolumn,
+  ColumnFamilyDescriptorcolumn,
   longmsTime)
 
 
 void
-Canary.StdOutSink.publishWriteTiming(ServerNameserverName,
+Canary.StdOutSink.publishWriteTiming(ServerNameserverName,
   HRegionInforegion,
-  HColumnDescriptorcolumn,
+  ColumnFamilyDescriptorcolumn,
   longmsTime)
 
 



[24/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 441a4c7..7c9f981 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -15358,11 +15358,11 @@
 Increases the given byte size of a Bloom filter until it 
can be folded by
  the given factor.
 
-computeHDFSBlocksDistribution(Configuration,
 HTableDescriptor, HRegionInfo) - Static method in class 
org.apache.hadoop.hbase.regionserver.HRegion
+computeHDFSBlocksDistribution(Configuration,
 TableDescriptor, HRegionInfo) - Static method in class 
org.apache.hadoop.hbase.regionserver.HRegion
 
 This is a helper function to compute HDFS block 
distribution on demand
 
-computeHDFSBlocksDistribution(Configuration,
 HTableDescriptor, HRegionInfo, Path) - Static method in class 
org.apache.hadoop.hbase.regionserver.HRegion
+computeHDFSBlocksDistribution(Configuration,
 TableDescriptor, HRegionInfo, Path) - Static method in class 
org.apache.hadoop.hbase.regionserver.HRegion
 
 This is a helper function to compute HDFS block 
distribution on demand
 
@@ -15966,7 +15966,7 @@
 Configure a MapReduce Job to perform an incremental load 
into the given
  table.
 
-configureIncrementalLoad(Job,
 HTableDescriptor, RegionLocator) - Static method in class 
org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2
+configureIncrementalLoad(Job,
 TableDescriptor, RegionLocator) - Static method in class 
org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2
 
 Configure a MapReduce Job to perform an incremental load 
into the given
  table.
@@ -15976,11 +15976,11 @@
 configureIncrementalLoad(Job,
 ListHFileOutputFormat2.TableInfo) - Static method in class 
org.apache.hadoop.hbase.mapreduce.MultiTableHFileOutputFormat
 
 Analogous to
- HFileOutputFormat2.configureIncrementalLoad(Job,
 HTableDescriptor, RegionLocator),
+ HFileOutputFormat2.configureIncrementalLoad(Job,
 TableDescriptor, RegionLocator),
  this function will configure the requisite number of reducers to write HFiles 
for multple
  tables simultaneously
 
-configureIncrementalLoadMap(Job,
 HTableDescriptor) - Static method in class 
org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2
+configureIncrementalLoadMap(Job,
 TableDescriptor) - Static method in class 
org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2
 
 configureInfoServer()
 - Method in class org.apache.hadoop.hbase.master.HMaster
 
@@ -18199,11 +18199,11 @@
 
 Create a cache for region to list of servers
 
-createCacheConf(HColumnDescriptor)
 - Method in class org.apache.hadoop.hbase.regionserver.HMobStore
+createCacheConf(ColumnFamilyDescriptor)
 - Method in class org.apache.hadoop.hbase.regionserver.HMobStore
 
 Creates the mob cache config.
 
-createCacheConf(HColumnDescriptor)
 - Method in class org.apache.hadoop.hbase.regionserver.HStore
+createCacheConf(ColumnFamilyDescriptor)
 - Method in class org.apache.hadoop.hbase.regionserver.HStore
 
 Creates the cache config.
 
@@ -18589,7 +18589,7 @@
 
 createEncoder(String)
 - Static method in enum org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
 
-createEncryptionContext(Configuration,
 HColumnDescriptor) - Static method in class 
org.apache.hadoop.hbase.security.EncryptionUtil
+createEncryptionContext(Configuration,
 ColumnFamilyDescriptor) - Static method in class 
org.apache.hadoop.hbase.security.EncryptionUtil
 
 Helper to create an encyption context.
 
@@ -18909,11 +18909,11 @@
 
 createHostAndPortStr(String,
 int) - Static method in class org.apache.hadoop.hbase.util.Addressing
 
-createHRegion(HRegionInfo,
 Path, Configuration, HTableDescriptor, WAL, boolean) - Static 
method in class org.apache.hadoop.hbase.regionserver.HRegion
+createHRegion(HRegionInfo,
 Path, Configuration, TableDescriptor, WAL, boolean) - Static method 
in class org.apache.hadoop.hbase.regionserver.HRegion
 
 Convenience method creating new HRegions.
 
-createHRegion(HRegionInfo,
 Path, Configuration, HTableDescriptor, WAL) - Static method in 
class org.apache.hadoop.hbase.regionserver.HRegion
+createHRegion(HRegionInfo,
 Path, Configuration, TableDescriptor, WAL) - Static method in class 
org.apache.hadoop.hbase.regionserver.HRegion
 
 createHRegionInfos(HTableDescriptor,
 byte[][]) - Static method in class org.apache.hadoop.hbase.util.ModifyRegionUtils
 
@@ -19828,14 +19828,35 @@
 
 createTable(HTableDescriptor)
 - Method in interface org.apache.hadoop.hbase.client.Admin
 
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use Admin.createTable(TableDescriptor)
+
+
+createTable(TableDescriptor)
 - Method in interface org.apache.hadoop.hbase.client.Admin
+
 Creates a new table.
 
 createTable(HTableDescriptor,
 byte[], byte[], int) - Method in interface 
org.apache.hadoop.hbase.client.Admin
 
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use 

[43/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
index 5c31929..35ed42c 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
@@ -92,6 +92,17 @@
 Provides HBase Client
 
 
+
+org.apache.hadoop.hbase.mapreduce
+
+Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce;>MapReduce
+Input/OutputFormats, a table indexing MapReduce job, and utility methods.
+
+
+
+org.apache.hadoop.hbase.rest.client
+
+
 
 
 
@@ -196,6 +207,18 @@
 TableDescriptorBuilder.copy(TableDescriptordesc)
 
 
+TableDescriptor
+Table.getDescriptor()
+Gets the table 
descriptor for this table.
+
+
+
+TableDescriptor
+Admin.listTableDescriptor(TableNametableName)
+Method for getting the tableDescriptor
+
+
+
 static TableDescriptor
 TableDescriptorBuilder.parseFrom(byte[]pbBytes)
 The input should be created by TableDescriptorBuilder.toByteArray(org.apache.hadoop.hbase.client.TableDescriptor).
@@ -235,12 +258,56 @@
 
 
 
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+Admin.listTableDescriptors()
+List all the userspace tables.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+Admin.listTableDescriptors(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableNametableNames)
+Get tableDescriptors
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+Admin.listTableDescriptors(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
+List all the userspace tables matching the given 
pattern.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+Admin.listTableDescriptors(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern,
+booleanincludeSysTables)
+List all the tables matching the given pattern.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+Admin.listTableDescriptors(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex)
+List all the userspace tables matching the given regular 
expression.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+Admin.listTableDescriptors(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex,
+booleanincludeSysTables)
+List all the tables matching the given pattern.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+Admin.listTableDescriptorsByNamespace(byte[]name)
+Get list of table descriptors by namespace
+
+
+
 default http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
 AsyncAdmin.listTables()
 List all the userspace tables.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
 AsyncAdmin.listTables(http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">Optionalhttp://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern,
   booleanincludeSysTables)
@@ -267,6 +334,19 @@
 
 
 
+void
+Admin.createTable(TableDescriptordesc)
+Creates a new table.
+
+
+
+void
+Admin.createTable(TableDescriptordesc,
+   byte[][]splitKeys)
+Creates a new table with an initial set of empty regions 
defined by 

[15/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
index 8278dde..da29251 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":18,"i10":6,"i11":18,"i12":6,"i13":6,"i14":6,"i15":18,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":18,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":18,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":18,"i43":6,"i44":6,"i45":6,"i46":18,"i47":6,"i48":18,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":18,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":18,"i64":6,"i65":18,"i66":6,"i67":18,"i68":6,"i69":18,"i70":6,"i71":6,"i72":18,"i73":6,"i74":18,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":18,"i90":18,"i91":6,"i92":6,"i93":18,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":18,"i6":6,"i7":6,"i8":6,"i9":18,"i10":6,"i11":18,"i12":6,"i13":6,"i14":6,"i15":18,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":18,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":18,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":18,"i43":6,"i44":6,"i45":6,"i46":18,"i47":6,"i48":18,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":18,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":18,"i67":6,"i68":18,"i69":6,"i70":18,"i71":6,"i72":18,"i73":6,"i74":6,"i75":18,"i76":6,"i77":18,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":18,"i99":18,"i100":6,"i101":6,"i102":18,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -460,11 +460,29 @@ public interface 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+isCatalogJanitorOn()
+Query on the catalog janitor state.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+isCleanerChoreOn()
+Query the current state of the cleaner chore.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
 isMasterInMaintenanceMode()
 Check whether master is in maintenance mode
 
 
-
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+isNormalizerOn()
+Query the current state of the region normalizer
+
+
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
 isProcedureFinished(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringsignature,
http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringinstance,
@@ -472,17 +490,17 @@ public interface Check the current state of the specified procedure.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in 

[09/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/HTableWrapper.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/HTableWrapper.html 
b/devapidocs/org/apache/hadoop/hbase/client/HTableWrapper.html
index 09eba46..f13140b 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HTableWrapper.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HTableWrapper.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":9,"i15":10,"i16":10,"i17":10,"i18":42,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":42,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":42,"i43":10,"i44":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":9,"i15":10,"i16":10,"i17":10,"i18":42,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":42,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":42,"i44":10,"i45":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -396,74 +396,80 @@ implements 
 
 
+TableDescriptor
+getDescriptor()
+Gets the table 
descriptor for this table.
+
+
+
 TableName
 getName()
 Gets the fully qualified table name instance of this 
table.
 
 
-
+
 int
 getOperationTimeout()
 Get timeout (millisecond) of each operation for in Table 
instance.
 
 
-
+
 int
 getReadRpcTimeout()
 Get timeout (millisecond) of each rpc read request in this 
Table instance.
 
 
-
+
 int
 getRpcTimeout()
 Deprecated.
 
 
-
+
 ResultScanner
 getScanner(byte[]family)
 Gets a scanner on the current table for the given 
family.
 
 
-
+
 ResultScanner
 getScanner(byte[]family,
   byte[]qualifier)
 Gets a scanner on the current table for the given family 
and qualifier.
 
 
-
+
 ResultScanner
 getScanner(Scanscan)
 Returns a scanner on the current table as specified by the 
Scan
  object.
 
 
-
+
 HTableDescriptor
 getTableDescriptor()
 Gets the table descriptor for 
this table.
 
 
-
+
 long
 getWriteBufferSize()
 Returns the maximum size in bytes of the write buffer for 
this HTable.
 
 
-
+
 int
 getWriteRpcTimeout()
 Get timeout (millisecond) of each rpc write request in this 
Table instance.
 
 
-
+
 Result
 increment(Incrementincrement)
 Increments one or more columns within a single row.
 
 
-
+
 long
 incrementColumnValue(byte[]row,
 byte[]family,
@@ -472,7 +478,7 @@ implements See Table.incrementColumnValue(byte[],
 byte[], byte[], long, Durability)
 
 
-
+
 long
 incrementColumnValue(byte[]row,
 byte[]family,
@@ -482,55 +488,55 @@ implements Atomically increments a column value.
 
 
-
+
 void
 internalClose()
 
-
+
 void
 mutateRow(RowMutationsrm)
 Performs multiple mutations atomically on a single 
row.
 
 
-
+
 void
 put(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListPutputs)
 Puts some data in the table, in batch.
 
 
-
+
 void
 put(Putput)
 Puts some data in the table.
 
 
-
+
 void
 setOperationTimeout(intoperationTimeout)
 Set timeout (millisecond) of each operation in this Table 
instance, will override the value
  of hbase.client.operation.timeout in configuration.
 
 
-
+
 void
 setReadRpcTimeout(intreadRpcTimeout)
 Set timeout (millisecond) of each rpc read request in 
operations of this Table instance, will
  override the value of hbase.rpc.read.timeout in configuration.
 
 
-
+
 void
 setRpcTimeout(intrpcTimeout)
 Deprecated.
 
 
-
+
 void
 setWriteBufferSize(longwriteBufferSize)
 Sets the size of the buffer in bytes.
 
 
-
+
 void
 setWriteRpcTimeout(intwriteRpcTimeout)
 Set timeout (millisecond) of each rpc write request in 
operations of this Table instance, will
@@ -1238,13 +1244,31 @@ publichttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.
 
 
 
+
+
+
+
+
+getDescriptor
+publicTableDescriptorgetDescriptor()
+  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+Description copied from 
interface:Table
+Gets the table 
descriptor for this table.
+
+Specified by:
+getDescriptorin
 interfaceTable
+Throws:
+http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException - if a remote or 
network exception occurs.
+
+
+
 
 
 
 
 
 getName

[42/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html 
b/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
index 31e07d4..0722257 100644
--- a/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
+++ b/apidocs/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class LoadIncrementalHFiles
+public class LoadIncrementalHFiles
 extends org.apache.hadoop.conf.Configured
 implements org.apache.hadoop.util.Tool
 Tool to load the output of HFileOutputFormat into an 
existing table.
@@ -392,7 +392,7 @@ implements org.apache.hadoop.util.Tool
 
 
 NAME
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String NAME
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String NAME
 
 See Also:
 Constant
 Field Values
@@ -405,7 +405,7 @@ implements org.apache.hadoop.util.Tool
 
 
 MAX_FILES_PER_REGION_PER_FAMILY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MAX_FILES_PER_REGION_PER_FAMILY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MAX_FILES_PER_REGION_PER_FAMILY
 
 See Also:
 Constant
 Field Values
@@ -418,7 +418,7 @@ implements org.apache.hadoop.util.Tool
 
 
 CREATE_TABLE_CONF_KEY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CREATE_TABLE_CONF_KEY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CREATE_TABLE_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -431,7 +431,7 @@ implements org.apache.hadoop.util.Tool
 
 
 IGNORE_UNMATCHED_CF_CONF_KEY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String IGNORE_UNMATCHED_CF_CONF_KEY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String IGNORE_UNMATCHED_CF_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -444,7 +444,7 @@ implements org.apache.hadoop.util.Tool
 
 
 ALWAYS_COPY_FILES
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String ALWAYS_COPY_FILES
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String ALWAYS_COPY_FILES
 
 See Also:
 Constant
 Field Values
@@ -465,7 +465,7 @@ implements org.apache.hadoop.util.Tool
 
 
 LoadIncrementalHFiles
-publicLoadIncrementalHFiles(org.apache.hadoop.conf.Configurationconf)
+publicLoadIncrementalHFiles(org.apache.hadoop.conf.Configurationconf)
   throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -487,7 +487,7 @@ implements org.apache.hadoop.util.Tool
 
 
 doBulkLoad
-publicvoiddoBulkLoad(org.apache.hadoop.fs.PathhfofDir,
+publicvoiddoBulkLoad(org.apache.hadoop.fs.PathhfofDir,
Adminadmin,
Tabletable,
RegionLocatorregionLocator)
@@ -514,7 +514,7 @@ implements org.apache.hadoop.util.Tool
 
 
 doBulkLoad
-publicvoiddoBulkLoad(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.Pathmap,
+publicvoiddoBulkLoad(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.Pathmap,
Adminadmin,
Tabletable,
RegionLocatorregionLocator,
@@ -544,7 +544,7 @@ implements org.apache.hadoop.util.Tool
 
 
 doBulkLoad
-publicvoiddoBulkLoad(org.apache.hadoop.fs.PathhfofDir,
+publicvoiddoBulkLoad(org.apache.hadoop.fs.PathhfofDir,
Adminadmin,
Tabletable,

[14/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
index 7816844..f2e7625 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncHBaseAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -464,11 +464,29 @@ implements 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+isCatalogJanitorOn()
+Query on the catalog janitor state.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+isCleanerChoreOn()
+Query the current state of the cleaner chore.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
 isMasterInMaintenanceMode()
 Check whether master is in maintenance mode
 
 
-
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+isNormalizerOn()
+Query the current state of the region normalizer
+
+
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
 isProcedureFinished(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringsignature,
http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringinstance,
@@ -476,13 +494,13 @@ implements Check the current state of the specified procedure.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
 isSnapshotFinished(SnapshotDescriptionsnapshot)
 Check the current state of the passed snapshot.
 
 
-
+
 

[07/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
index f403a65..1a2a97a 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i
 109":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i
 
109":10,"i110":10,"i111":10,"i112":10,"i113":10,"i114":10,"i115":10,"i116":10,"i117":10,"i118":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class RawAsyncHBaseAdmin
+public class RawAsyncHBaseAdmin
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements AsyncAdmin
 The implementation of AsyncAdmin.
@@ -708,11 +708,29 @@ implements 
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+isCatalogJanitorOn()
+Query on the catalog janitor state.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+isCleanerChoreOn()
+Query the current state of the cleaner chore.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
 isMasterInMaintenanceMode()
 Check whether master is in maintenance mode
 
 
-
+
+http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">Boolean
+isNormalizerOn()
+Query the current state of the region normalizer
+
+
+
 http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true;
 title="class or interface in java.util.concurrent">CompletableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 

[17/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
index 7aeab6a..fccfabd 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
@@ -124,9 +124,22 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 HTableDescriptor
 Admin.getTableDescriptor(TableNametableName)
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use Admin.listTableDescriptor(TableName)
+
+
+
+
+TableDescriptor
+Admin.listTableDescriptor(TableNametableName)
 Method for getting the tableDescriptor
 
 
+
+TableDescriptor
+HBaseAdmin.listTableDescriptor(TableNametableName)
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
index cecc275..30483eb 100644
--- a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
@@ -88,11 +88,11 @@
 
 Annotation Type Hierarchy
 
-org.apache.hadoop.hbase.classification.InterfaceStability.Evolving (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true;
 title="class or interface in java.lang.annotation">Annotation)
+org.apache.hadoop.hbase.classification.InterfaceStability.Stable (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true;
 title="class or interface in java.lang.annotation">Annotation)
+org.apache.hadoop.hbase.classification.InterfaceAudience.Private (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true;
 title="class or interface in java.lang.annotation">Annotation)
 org.apache.hadoop.hbase.classification.InterfaceAudience.Public (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true;
 title="class or interface in java.lang.annotation">Annotation)
+org.apache.hadoop.hbase.classification.InterfaceStability.Evolving (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true;
 title="class or interface in java.lang.annotation">Annotation)
 org.apache.hadoop.hbase.classification.InterfaceAudience.LimitedPrivate (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true;
 title="class or interface in java.lang.annotation">Annotation)
-org.apache.hadoop.hbase.classification.InterfaceAudience.Private (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true;
 title="class or interface in java.lang.annotation">Annotation)
-org.apache.hadoop.hbase.classification.InterfaceStability.Stable (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true;
 title="class or interface in java.lang.annotation">Annotation)
 org.apache.hadoop.hbase.classification.InterfaceStability.Unstable (implements 
java.lang.annotation.http://docs.oracle.com/javase/8/docs/api/java/lang/annotation/Annotation.html?is-external=true;
 title="class or interface in java.lang.annotation">Annotation)
 
 



[16/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Admin.html 
b/devapidocs/org/apache/hadoop/hbase/client/Admin.html
index 2ebbffe..773961a 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Admin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Admin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":18,"i7":18,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":38,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":18,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":18,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":6,"i69":38,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":18,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":18,"i112":18,"i113":18,"i114":6,"i115":6,"i116":6,"i117":6,"i118":6,"i119":6,"
 
i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":6,"i130":6,"i131":6,"i132":6,"i133":6,"i134":6,"i135":6,"i136":6,"i137":38,"i138":6,"i139":6,"i140":38,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":6,"i148":6,"i149":6,"i150":6,"i151":18,"i152":18,"i153":6,"i154":6,"i155":6,"i156":6,"i157":6,"i158":6,"i159":6,"i160":6,"i161":6,"i162":6,"i163":6,"i164":6,"i165":6,"i166":6,"i167":6,"i168":6,"i169":6,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":6,"i180":6,"i181":6,"i182":6,"i183":6,"i184":6,"i185":18};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":18,"i7":18,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":50,"i33":50,"i34":50,"i35":6,"i36":6,"i37":6,"i38":50,"i39":6,"i40":38,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":38,"i52":38,"i53":6,"i54":6,"i55":18,"i56":6,"i57":6,"i58":6,"i59":38,"i60":38,"i61":6,"i62":6,"i63":18,"i64":6,"i65":6,"i66":6,"i67":38,"i68":38,"i69":6,"i70":6,"i71":6,"i72":6,"i73":38,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":18,"i92":6,"i93":38,"i94":38,"i95":38,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":18,"i116":18,"i117":18,"i118"
 
:6,"i119":6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":38,"i130":6,"i131":6,"i132":6,"i133":6,"i134":6,"i135":6,"i136":38,"i137":38,"i138":38,"i139":38,"i140":38,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":6,"i148":6,"i149":38,"i150":6,"i151":6,"i152":38,"i153":6,"i154":6,"i155":6,"i156":6,"i157":6,"i158":38,"i159":6,"i160":38,"i161":6,"i162":6,"i163":6,"i164":6,"i165":18,"i166":18,"i167":6,"i168":6,"i169":6,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":6,"i180":6,"i181":6,"i182":6,"i183":6,"i184":6,"i185":6,"i186":6,"i187":6,"i188":6,"i189":6,"i190":6,"i191":6,"i192":6,"i193":6,"i194":6,"i195":6,"i196":6,"i197":6,"i198":6,"i199":18};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -363,35 +363,76 @@ extends 
-void
+default void
 createTable(HTableDescriptordesc)
-Creates a new table.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use createTable(TableDescriptor)
+
 
 
 
-void
+default void
 createTable(HTableDescriptordesc,
byte[][]splitKeys)
-Creates a new table with an initial set of empty regions 
defined by the specified split keys.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use createTable(TableDescriptor,
 byte[][])
+
 
 
 
-void
+default void
 createTable(HTableDescriptordesc,
byte[]startKey,
byte[]endKey,
intnumRegions)
-Creates a new table with the specified number of 
regions.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use createTable(TableDescriptor,
 byte[], byte[], int)
+
 
 
 

[21/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
index 85f763f..850c918 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
@@ -5059,16 +5059,16 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 static HDFSBlocksDistribution
-HRegion.computeHDFSBlocksDistribution(org.apache.hadoop.conf.Configurationconf,
- HTableDescriptortableDescriptor,
+HRegion.computeHDFSBlocksDistribution(org.apache.hadoop.conf.Configurationconf,
+ TableDescriptortableDescriptor,
  HRegionInforegionInfo)
 This is a helper function to compute HDFS block 
distribution on demand
 
 
 
 static HDFSBlocksDistribution
-HRegion.computeHDFSBlocksDistribution(org.apache.hadoop.conf.Configurationconf,
- HTableDescriptortableDescriptor,
+HRegion.computeHDFSBlocksDistribution(org.apache.hadoop.conf.Configurationconf,
+ TableDescriptortableDescriptor,
  HRegionInforegionInfo,
  org.apache.hadoop.fs.PathtablePath)
 This is a helper function to compute HDFS block 
distribution on demand
@@ -5090,18 +5090,18 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 static HRegion
-HRegion.createHRegion(HRegionInfoinfo,
+HRegion.createHRegion(HRegionInfoinfo,
  org.apache.hadoop.fs.PathrootDir,
  org.apache.hadoop.conf.Configurationconf,
- HTableDescriptorhTableDescriptor,
+ TableDescriptorhTableDescriptor,
  WALwal)
 
 
 static HRegion
-HRegion.createHRegion(HRegionInfoinfo,
+HRegion.createHRegion(HRegionInfoinfo,
  org.apache.hadoop.fs.PathrootDir,
  org.apache.hadoop.conf.Configurationconf,
- HTableDescriptorhTableDescriptor,
+ TableDescriptorhTableDescriptor,
  WALwal,
  booleaninitialize)
 Convenience method creating new HRegions.
@@ -5200,12 +5200,12 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 (package private) static HRegion
-HRegion.newHRegion(org.apache.hadoop.fs.PathtableDir,
+HRegion.newHRegion(org.apache.hadoop.fs.PathtableDir,
   WALwal,
   org.apache.hadoop.fs.FileSystemfs,
   org.apache.hadoop.conf.Configurationconf,
   HRegionInforegionInfo,
-  HTableDescriptorhtd,
+  TableDescriptorhtd,
   RegionServerServicesrsServices)
 A utility method to create new instances of HRegion based 
on the
  HConstants.REGION_IMPL
 configuration property.
@@ -5213,22 +5213,22 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 static HRegion
-HRegion.openHRegion(org.apache.hadoop.conf.Configurationconf,
+HRegion.openHRegion(org.apache.hadoop.conf.Configurationconf,
org.apache.hadoop.fs.FileSystemfs,
org.apache.hadoop.fs.PathrootDir,
HRegionInfoinfo,
-   HTableDescriptorhtd,
+   TableDescriptorhtd,
WALwal)
 Open a Region.
 
 
 
 static HRegion
-HRegion.openHRegion(org.apache.hadoop.conf.Configurationconf,
+HRegion.openHRegion(org.apache.hadoop.conf.Configurationconf,
org.apache.hadoop.fs.FileSystemfs,
org.apache.hadoop.fs.PathrootDir,
HRegionInfoinfo,
-   HTableDescriptorhtd,
+   TableDescriptorhtd,
WALwal,
RegionServerServicesrsServices,
CancelableProgressablereporter)
@@ -5237,12 +5237,12 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 static HRegion
-HRegion.openHRegion(org.apache.hadoop.conf.Configurationconf,
+HRegion.openHRegion(org.apache.hadoop.conf.Configurationconf,
org.apache.hadoop.fs.FileSystemfs,
org.apache.hadoop.fs.PathrootDir,
org.apache.hadoop.fs.PathtableDir,
HRegionInfoinfo,
-   HTableDescriptorhtd,
+   TableDescriptorhtd,
WALwal,
RegionServerServicesrsServices,
CancelableProgressablereporter)
@@ -5251,8 +5251,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static HRegion
-HRegion.openHRegion(HRegionInfoinfo,
-   HTableDescriptorhtd,
+HRegion.openHRegion(HRegionInfoinfo,
+   TableDescriptorhtd,
WALwal,
org.apache.hadoop.conf.Configurationconf)
 Open a Region.
@@ -5260,8 +5260,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static HRegion

[03/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
index ef45d65..ce7419d 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/TableDescriptor.html
@@ -92,6 +92,27 @@
 Provides HBase Client
 
 
+
+org.apache.hadoop.hbase.constraint
+
+Restrict the domain of a data attribute, often times to 
fulfill business rules/requirements.
+
+
+
+org.apache.hadoop.hbase.mapreduce
+
+Provides HBase http://wiki.apache.org/hadoop/HadoopMapReduce;>MapReduce
+Input/OutputFormats, a table indexing MapReduce job, and utility methods.
+
+
+
+org.apache.hadoop.hbase.regionserver
+
+
+
+org.apache.hadoop.hbase.rest.client
+
+
 
 
 
@@ -166,6 +187,10 @@
 
 
 
+private TableDescriptor
+HBaseAdmin.CreateTableFuture.desc
+
+
 static TableDescriptor
 TableDescriptorBuilder.NAMESPACE_TABLEDESC
 Table descriptor for namespace table
@@ -202,6 +227,47 @@
 TableDescriptorBuilder.copy(TableDescriptordesc)
 
 
+TableDescriptor
+Table.getDescriptor()
+Gets the table 
descriptor for this table.
+
+
+
+TableDescriptor
+HTable.getDescriptor()
+
+
+TableDescriptor
+HTableWrapper.getDescriptor()
+
+
+protected TableDescriptor
+HBaseAdmin.CreateTableFuture.getTableDescriptor()
+
+
+protected TableDescriptor
+HBaseAdmin.TableFuture.getTableDescriptor()
+
+
+(package private) static TableDescriptor
+HBaseAdmin.getTableDescriptor(TableNametableName,
+  Connectionconnection,
+  RpcRetryingCallerFactoryrpcCallerFactory,
+  RpcControllerFactoryrpcControllerFactory,
+  intoperationTimeout,
+  intrpcTimeout)
+
+
+TableDescriptor
+Admin.listTableDescriptor(TableNametableName)
+Method for getting the tableDescriptor
+
+
+
+TableDescriptor
+HBaseAdmin.listTableDescriptor(TableNametableName)
+
+
 static TableDescriptor
 TableDescriptorBuilder.parseFrom(byte[]pbBytes)
 The input should be created by TableDescriptorBuilder.toByteArray(org.apache.hadoop.hbase.client.TableDescriptor).
@@ -283,6 +349,80 @@
 RawAsyncHBaseAdmin.getTableDescriptor(TableNametableName)
 
 
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+Admin.listTableDescriptors()
+List all the userspace tables.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+HBaseAdmin.listTableDescriptors()
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+Admin.listTableDescriptors(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableNametableNames)
+Get tableDescriptors
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+HBaseAdmin.listTableDescriptors(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableNametableNames)
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+Admin.listTableDescriptors(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
+List all the userspace tables matching the given 
pattern.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+HBaseAdmin.listTableDescriptors(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in 
java.util.regex">Patternpattern)
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+Admin.listTableDescriptors(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern,
+booleanincludeSysTables)
+List all the tables matching the given pattern.
+
+
+
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableDescriptor
+HBaseAdmin.listTableDescriptors(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern,
+booleanincludeSysTables)
+

[44/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/org/apache/hadoop/hbase/client/Table.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Table.html 
b/apidocs/org/apache/hadoop/hbase/client/Table.html
index 19a4b6c..f35e800 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Table.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Table.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":38,"i25":6,"i26":6,"i27":6,"i28":6,"i29":38,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":38,"i38":38,"i39":38,"i40":38,"i41":38};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":38,"i26":6,"i27":6,"i28":6,"i29":38,"i30":38,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":38,"i39":38,"i40":38,"i41":38,"i42":38};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -327,24 +327,30 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
+TableDescriptor
+getDescriptor()
+Gets the table 
descriptor for this table.
+
+
+
 TableName
 getName()
 Gets the fully qualified table name instance of this 
table.
 
 
-
+
 int
 getOperationTimeout()
 Get timeout (millisecond) of each operation for in Table 
instance.
 
 
-
+
 int
 getReadRpcTimeout()
 Get timeout (millisecond) of each rpc read request in this 
Table instance.
 
 
-
+
 int
 getRpcTimeout()
 Deprecated.
@@ -352,33 +358,36 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 ResultScanner
 getScanner(byte[]family)
 Gets a scanner on the current table for the given 
family.
 
 
-
+
 ResultScanner
 getScanner(byte[]family,
   byte[]qualifier)
 Gets a scanner on the current table for the given family 
and qualifier.
 
 
-
+
 ResultScanner
 getScanner(Scanscan)
 Returns a scanner on the current table as specified by the 
Scan
  object.
 
 
-
+
 HTableDescriptor
 getTableDescriptor()
-Gets the table descriptor for 
this table.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use getDescriptor()
+
 
 
-
+
 long
 getWriteBufferSize()
 Deprecated.
@@ -386,19 +395,19 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 int
 getWriteRpcTimeout()
 Get timeout (millisecond) of each rpc write request in this 
Table instance.
 
 
-
+
 Result
 increment(Incrementincrement)
 Increments one or more columns within a single row.
 
 
-
+
 long
 incrementColumnValue(byte[]row,
 byte[]family,
@@ -407,7 +416,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 See incrementColumnValue(byte[],
 byte[], byte[], long, Durability)
 
 
-
+
 long
 incrementColumnValue(byte[]row,
 byte[]family,
@@ -417,25 +426,25 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 Atomically increments a column value.
 
 
-
+
 void
 mutateRow(RowMutationsrm)
 Performs multiple mutations atomically on a single 
row.
 
 
-
+
 void
 put(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListPutputs)
 Puts some data in the table, in batch.
 
 
-
+
 void
 put(Putput)
 Puts some data in the table.
 
 
-
+
 void
 setOperationTimeout(intoperationTimeout)
 Deprecated.
@@ -443,7 +452,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 void
 setReadRpcTimeout(intreadRpcTimeout)
 Deprecated.
@@ -451,7 +460,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 void
 setRpcTimeout(intrpcTimeout)
 Deprecated.
@@ -459,7 +468,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 void
 setWriteBufferSize(longwriteBufferSize)
 Deprecated.
@@ -468,7 +477,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 
-
+
 void
 setWriteRpcTimeout(intwriteRpcTimeout)
 Deprecated.
@@ -520,8 +529,11 @@ extends http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html
 
 
 getTableDescriptor
-HTableDescriptorgetTableDescriptor()
- throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
+HTableDescriptorgetTableDescriptor()
+   

[02/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/package-use.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-use.html 
b/devapidocs/org/apache/hadoop/hbase/client/package-use.html
index 5730b09..2ef70e4 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-use.html
@@ -275,56 +275,60 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-org.apache.hadoop.hbase.security.access
+org.apache.hadoop.hbase.security
 
 
 
-org.apache.hadoop.hbase.security.token
+org.apache.hadoop.hbase.security.access
 
 
 
-org.apache.hadoop.hbase.security.visibility
+org.apache.hadoop.hbase.security.token
 
 
 
-org.apache.hadoop.hbase.snapshot
+org.apache.hadoop.hbase.security.visibility
 
 
 
+org.apache.hadoop.hbase.snapshot
+
+
+
 org.apache.hadoop.hbase.thrift
 
 Provides an HBase http://incubator.apache.org/thrift/;>Thrift
 service.
 
 
-
+
 org.apache.hadoop.hbase.thrift2
 
 Provides an HBase http://thrift.apache.org/;>Thrift
 service.
 
 
-
+
 org.apache.hadoop.hbase.tool
 
 
-
+
 org.apache.hadoop.hbase.util
 
 
-
+
 org.apache.hadoop.hbase.util.hbck
 
 
-
+
 org.apache.hadoop.hbase.wal
 
 
-
+
 org.apache.hadoop.hbase.zookeeper
 
 
-
+
 org.apache.hbase.archetypes.exemplars.client
 
 This package provides fully-functional exemplar Java code 
demonstrating
@@ -1614,6 +1618,14 @@ service.
 Used to perform Put operations for a single row.
 
 
+
+TableDescriptor
+TableDescriptor contains the details about an HBase table 
such as the descriptors of
+ all the column families, is the table a catalog table,  hbase:meta 
,
+ if the table is read only, the maximum size of the memstore,
+ when the region split should occur, coprocessors associated with it 
etc...
+
+
 
 
 
@@ -1861,60 +1873,74 @@ service.
 
 
 
+ColumnFamilyDescriptor
+An ColumnFamilyDescriptor contains information about a 
column family such as the
+ number of versions, compression settings, etc.
+
+
+
 Connection
 A cluster connection encapsulating lower level individual 
connections to actual servers and
  a connection to zookeeper.
 
 
-
+
 Delete
 Used to perform Delete operations on a single row.
 
 
-
+
 Durability
 Enum describing the durability guarantees for tables and Mutations
  Note that the items must be sorted in order of increasing durability
 
 
-
+
 Mutation
 
-
+
 Put
 Used to perform Put operations for a single row.
 
 
-
+
 RegionLocator
 Used to view region location information for a single HBase 
table.
 
 
-
+
 Result
 Single row result of a Get or Scan query.
 
 
-
+
 ResultScanner
 Interface for client-side scanning.
 
 
-
+
 Scan
 Used to perform Scan operations.
 
 
-
+
 SecureBulkLoadClient
 Client proxy for SecureBulkLoadProtocol
 
 
-
+
 Table
 Used to communicate with a single HBase table.
 
 
+
+TableDescriptor
+TableDescriptor contains the details about an HBase table 
such as the descriptors of
+ all the column families, is the table a catalog table,  hbase:meta 
,
+ if the table is read only, the maximum size of the memstore,
+ when the region split should occur, coprocessors associated with it 
etc...
+
+
 
 
 
@@ -2071,11 +2097,17 @@ service.
 
 
 
+ColumnFamilyDescriptor
+An ColumnFamilyDescriptor contains information about a 
column family such as the
+ number of versions, compression settings, etc.
+
+
+
 MobCompactPartitionPolicy
 Enum describing the mob compact partition policy 
types.
 
 
-
+
 Scan
 Used to perform Scan operations.
 
@@ -2205,86 +2237,100 @@ service.
 
 
 
+ColumnFamilyDescriptor
+An ColumnFamilyDescriptor contains information about a 
column family such as the
+ number of versions, compression settings, etc.
+
+
+
 Connection
 A cluster connection encapsulating lower level individual 
connections to actual servers and
  a connection to zookeeper.
 
 
-
+
 Delete
 Used to perform Delete operations on a single row.
 
 
-
+
 Durability
 Enum describing the durability guarantees for tables and Mutations
  Note that the items must be sorted in order of increasing durability
 
 
-
+
 Get
 Used to perform Get operations on a single row.
 
 
-
+
 Increment
 Used to perform Increment operations on a single row.
 
 
-
+
 IsolationLevel
 Specify Isolation levels in Scan operations.
 
 
-
+
 Mutation
 
-
+
 Operation
 Superclass for any type that maps to a potentially 
application-level query.
 
 
-
+
 OperationWithAttributes
 
-
+
 Put
 Used to perform Put operations for a single row.
 
 
-
+
 Query
 
-
+
 Result
 Single row result of a Get or Scan query.
 
 
-
+
 RowMutations
 Performs multiple mutations atomically on a single 
row.
 
 
-
+
 RowTooBigException
 Gets or Scans throw this exception if running without 
in-row scan flag
  set and row size appears to exceed max configured size (configurable via
  hbase.table.max.rowsize).
 
 
-
+
 

[26/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index d0dbe7d..450f1d8 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -289,7 +289,7 @@
 2242
 0
 0
-14711
+14778
 
 Files
 
@@ -752,7 +752,7 @@
 org/apache/hadoop/hbase/client/Admin.java
 0
 0
-78
+94
 
 org/apache/hadoop/hbase/client/Append.java
 0
@@ -762,7 +762,7 @@
 org/apache/hadoop/hbase/client/AsyncAdmin.java
 0
 0
-12
+15
 
 org/apache/hadoop/hbase/client/AsyncAdminBuilder.java
 0
@@ -952,7 +952,7 @@
 org/apache/hadoop/hbase/client/HBaseAdmin.java
 0
 0
-80
+94
 
 org/apache/hadoop/hbase/client/HRegionLocator.java
 0
@@ -1217,7 +1217,7 @@
 org/apache/hadoop/hbase/client/Table.java
 0
 0
-17
+18
 
 org/apache/hadoop/hbase/client/TableDescriptor.java
 0
@@ -1477,7 +1477,7 @@
 org/apache/hadoop/hbase/constraint/ConstraintProcessor.java
 0
 0
-2
+3
 
 org/apache/hadoop/hbase/constraint/Constraints.java
 0
@@ -2742,7 +2742,7 @@
 org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
 0
 0
-20
+24
 
 org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
 0
@@ -3527,557 +3527,567 @@
 org/apache/hadoop/hbase/mob/DefaultMobStoreCompactor.java
 0
 0
-5
+6
 
 org/apache/hadoop/hbase/mob/DefaultMobStoreFlusher.java
 0
 0
-2
+4
 
+org/apache/hadoop/hbase/mob/MobCacheConfig.java
+0
+0
+1
+
 org/apache/hadoop/hbase/mob/MobFile.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/mob/MobFileCache.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/mob/MobUtils.java
 0
 0
 12
-
+
 org/apache/hadoop/hbase/mob/compactions/MobCompactor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactionRequest.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/mob/compactions/PartitionedMobCompactor.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/monitoring/LogMonitoring.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/monitoring/MemoryBoundedLogMessageBuffer.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/monitoring/MonitoredTaskImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/monitoring/StateDumpServlet.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/monitoring/TaskMonitor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/monitoring/ThreadMonitoring.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/namespace/NamespaceStateManager.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/namespace/NamespaceTableAndRegionInfo.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/net/Address.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/nio/ByteBuff.java
 0
 0
 24
-
+
 org/apache/hadoop/hbase/nio/MultiByteBuff.java
 0
 0
 30
-
+
 org/apache/hadoop/hbase/nio/SingleByteBuff.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/procedure/MasterProcedureManager.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/procedure/Procedure.java
 0
 0
 14
-
+
 org/apache/hadoop/hbase/procedure/ProcedureCoordinator.java
 0
 0
 12
-
+
 org/apache/hadoop/hbase/procedure/ProcedureCoordinatorRpcs.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/procedure/ProcedureManagerHost.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/procedure/ProcedureMember.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/procedure/ProcedureMemberRpcs.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/procedure/RegionServerProcedureManager.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/procedure/Subprocedure.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/procedure/ZKProcedureCoordinator.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/procedure/ZKProcedureMemberRpcs.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/procedure/ZKProcedureUtil.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/procedure/flush/MasterFlushTableProcedureManager.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/procedure/flush/RegionServerFlushTableProcedureManager.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/procedure2/AbstractProcedureScheduler.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/procedure2/BadProcedureException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/procedure2/LockAndQueue.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/procedure2/Procedure.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/procedure2/ProcedureDeque.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 0
 0
 25
-
+
 org/apache/hadoop/hbase/procedure2/ProcedureUtil.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/procedure2/RemoteProcedureException.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/procedure2/RootProcedureState.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/procedure2/SimpleProcedureScheduler.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/procedure2/StateMachineProcedure.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/procedure2/store/ProcedureStore.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/procedure2/store/ProcedureStoreBase.java
 0
 0
 1
-
+
 

[29/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.html
index 65a78e7..664869e 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/MultiTableHFileOutputFormat.html
@@ -29,106 +29,105 @@
 021import 
com.google.common.annotations.VisibleForTesting;
 022import org.apache.commons.logging.Log;
 023import 
org.apache.commons.logging.LogFactory;
-024import 
org.apache.hadoop.hbase.HTableDescriptor;
-025import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-026import 
org.apache.hadoop.hbase.client.RegionLocator;
-027import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-028import 
org.apache.hadoop.hbase.util.Bytes;
-029import org.apache.hadoop.mapreduce.Job;
-030
-031import java.io.IOException;
-032import java.nio.charset.Charset;
-033import java.util.List;
-034
-035/**
-036 * Create 3 level tree directory, first 
level is using table name as parent
-037 * directory and then use family name as 
child directory, and all related HFiles
-038 * for one family are under child 
directory
-039 * -tableName1
-040 * -columnFamilyName1
-041 * -columnFamilyName2
-042 * -HFiles
-043 * -tableName2
-044 * -columnFamilyName1
-045 * -HFiles
-046 * -columnFamilyName2
-047 */
-048@InterfaceAudience.Public
-049@VisibleForTesting
-050public class MultiTableHFileOutputFormat 
extends HFileOutputFormat2 {
-051  private static final Log LOG = 
LogFactory.getLog(MultiTableHFileOutputFormat.class);
-052
-053  /**
-054   * Creates a composite key to use as a 
mapper output key when using
-055   * 
MultiTableHFileOutputFormat.configureIncrementaLoad to set up bulk ingest job
-056   *
-057   * @param tableName Name of the Table - 
Eg: TableName.getNameAsString()
-058   * @param suffixUsually represents 
a rowkey when creating a mapper key or column family
-059   * @return  byte[] 
representation of composite key
-060   */
-061  public static byte[] 
createCompositeKey(byte[] tableName,
-062  
byte[] suffix) {
-063return 
combineTableNameSuffix(tableName, suffix);
-064  }
-065
-066  /**
-067   * Alternate api which accepts an 
ImmutableBytesWritable for the suffix
-068   * @see 
MultiTableHFileOutputFormat#createCompositeKey(byte[], byte[])
-069   */
-070  public static byte[] 
createCompositeKey(byte[] tableName,
-071  
ImmutableBytesWritable suffix) {
-072return 
combineTableNameSuffix(tableName, suffix.get());
-073  }
-074
-075  /**
-076   * Alternate api which accepts a String 
for the tableName and ImmutableBytesWritable for the
-077   * suffix
-078   * @see 
MultiTableHFileOutputFormat#createCompositeKey(byte[], byte[])
-079   */
-080  public static byte[] 
createCompositeKey(String tableName,
-081  
ImmutableBytesWritable suffix) {
-082return 
combineTableNameSuffix(tableName.getBytes(Charset.forName("UTF-8")), 
suffix.get());
-083  }
-084
-085  /**
-086   * Analogous to
-087   * {@link 
HFileOutputFormat2#configureIncrementalLoad(Job, HTableDescriptor, 
RegionLocator)},
-088   * this function will configure the 
requisite number of reducers to write HFiles for multple
-089   * tables simultaneously
-090   *
-091   * @param job   See 
{@link org.apache.hadoop.mapreduce.Job}
-092   * @param multiTableDescriptors Table 
descriptor and region locator pairs
-093   * @throws IOException
-094   */
-095  public static void 
configureIncrementalLoad(Job job, ListTableInfo
-096  multiTableDescriptors)
-097  throws IOException {
-098
MultiTableHFileOutputFormat.configureIncrementalLoad(job, 
multiTableDescriptors,
-099
MultiTableHFileOutputFormat.class);
-100  }
-101
-102  final private static int 
validateCompositeKey(byte[] keyBytes) {
-103
-104int separatorIdx = 
Bytes.indexOf(keyBytes, HFileOutputFormat2.tableSeparator);
-105
-106// Either the separator was not found 
or a tablename wasn't present or a key wasn't present
-107if (separatorIdx == -1) {
-108  throw new 
IllegalArgumentException("Invalid format for composite key [" + Bytes
-109  .toStringBinary(keyBytes) + 
"]. Cannot extract tablename and suffix from key");
-110}
-111return separatorIdx;
-112  }
-113
-114  protected static byte[] 
getTableName(byte[] keyBytes) {
-115int separatorIdx = 
validateCompositeKey(keyBytes);
-116return Bytes.copy(keyBytes, 0, 
separatorIdx);
-117  }
-118
-119  protected static byte[] 
getSuffix(byte[] keyBytes) {
-120int 

[27/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/book.html
--
diff --git a/book.html b/book.html
index 9eeedb5..3cf6127 100644
--- a/book.html
+++ b/book.html
@@ -1381,11 +1381,10 @@ To check for well-formedness and only print output if 
errors exist, use the comm
 
 Keep Configuration In Sync Across the Cluster
 
-When running in distributed mode, after you make an edit to an HBase 
configuration, make sure you copy the content of the conf/ directory 
to all nodes of the cluster.
+When running in distributed mode, after you make an edit to an HBase 
configuration, make sure you copy the contents of the conf/ directory 
to all nodes of the cluster.
 HBase will not do this for you.
 Use rsync, scp, or another secure mechanism for 
copying the configuration files to your nodes.
-For most configuration, a restart is needed for servers to pick up changes An 
exception is dynamic configuration.
-to be described later below.
+For most configurations, a restart is needed for servers to pick up changes. 
Dynamic configuration is an exception to this, to be described later below.
 
 
 
@@ -1473,12 +1472,12 @@ You must set JAVA_HOME on each node of 
your cluster. hbase-env.
 
 Loopback IP
 
-Prior to hbase-0.96.0, HBase only used the IP address 
127.0.0.1 to refer to localhost, and this could not 
be configured.
+Prior to hbase-0.96.0, HBase only used the IP address 
127.0.0.1 to refer to localhost, and this was not 
configurable.
 See Loopback IP for more details.
 
 NTP
 
-The clocks on cluster nodes should be synchronized. A small amount of 
variation is acceptable, but larger amounts of skew can cause erratic and 
unexpected behavior. Time synchronization is one of the first things to check 
if you see unexplained problems in your cluster. It is recommended that you run 
a Network Time Protocol (NTP) service, or another time-synchronization 
mechanism, on your cluster, and that all nodes look to the same service for 
time synchronization. See the http://www.tldp.org/LDP/sag/html/basic-ntp-config.html;>Basic NTP 
Configuration at The Linux Documentation Project 
(TLDP) to set up NTP.
+The clocks on cluster nodes should be synchronized. A small amount of 
variation is acceptable, but larger amounts of skew can cause erratic and 
unexpected behavior. Time synchronization is one of the first things to check 
if you see unexplained problems in your cluster. It is recommended that you run 
a Network Time Protocol (NTP) service, or another time-synchronization 
mechanism on your cluster and that all nodes look to the same service for time 
synchronization. See the http://www.tldp.org/LDP/sag/html/basic-ntp-config.html;>Basic NTP 
Configuration at The Linux Documentation Project 
(TLDP) to set up NTP.
 
 
 
@@ -1540,8 +1539,8 @@ hadoop  -   nproc   32000
 
 Windows
 
-Prior to HBase 0.96, testing for running HBase on Microsoft Windows was 
limited.
-Running a on Windows nodes is not recommended for production systems.
+Prior to HBase 0.96, running HBase on Microsoft Windows was limited only 
for testing purposes.
+Running production systems on Windows machines is not recommended.
 
 
 
@@ -1774,8 +1773,8 @@ data loss. This patch is present in Apache Hadoop 
releases 2.6.1+.
 The bundled jar is ONLY for use in standalone mode.
 In distributed mode, it is critical that the version of Hadoop that 
is out on your cluster match what is under HBase.
 Replace the hadoop jar found in the HBase lib directory with the hadoop jar 
you are running on your cluster to avoid version mismatch issues.
-Make sure you replace the jar in HBase everywhere on your cluster.
-Hadoop version mismatch issues have various manifestations but often all looks 
like its hung up.
+Make sure you replace the jar in HBase across your whole cluster.
+Hadoop version mismatch issues have various manifestations but often all look 
like its hung.
 
 
 
@@ -1860,7 +1859,7 @@ HDFS where data is replicated ensures the latter.
 
 
 To configure this standalone variant, edit your hbase-site.xml
-setting the hbase.rootdir to point at a directory in your
+setting hbase.rootdir  to point at a directory in your
 HDFS instance but then set hbase.cluster.distributed
 to false. For example:
 
@@ -1912,8 +1911,8 @@ Some of the information that was originally in this 
section has been moved there
 
 
 A pseudo-distributed mode is simply a fully-distributed mode run on a 
single host.
-Use this configuration testing and prototyping on HBase.
-Do not use this configuration for production nor for evaluating HBase 
performance.
+Use this HBase configuration for testing and prototyping purposes only.
+Do not use this configuration for production or for performance evaluation.
 
 
 
@@ -1922,11 +1921,11 @@ Do not use this configuration for production nor for 
evaluating HBase performanc
 
 By default, HBase runs in standalone mode.
 Both standalone mode and pseudo-distributed mode are 

[10/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/HTable.html 
b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
index 95346a7..1ac915d 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HTable.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":9,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":9,"i29":10,"i30":10,"i31":9,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":42,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":42,"i53":42,"i54":42,"i55":42,"i56":42,"i57":10,"i58":10,"i59":9};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":9,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":9,"i29":10,"i30":10,"i31":10,"i32":9,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":42,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":42,"i54":42,"i55":42,"i56":42,"i57":42,"i58":10,"i59":10,"i60":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -525,6 +525,12 @@ implements getDefaultExecutor(org.apache.hadoop.conf.Configurationconf)
 
 
+TableDescriptor
+getDescriptor()
+Gets the table 
descriptor for this table.
+
+
+
 private Pairhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionLocation
 getKeysAndRegionsInRange(byte[]startKey,
 byte[]endKey,
@@ -533,7 +539,7 @@ implements 
 
 
-
+
 private Pairhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionLocation
 getKeysAndRegionsInRange(byte[]startKey,
 byte[]endKey,
@@ -543,93 +549,93 @@ implements 
 
 
-
+
 static int
 getMaxKeyValueSize(org.apache.hadoop.conf.Configurationconf)
 
-
+
 TableName
 getName()
 Gets the fully qualified table name instance of this 
table.
 
 
-
+
 int
 getOperationTimeout()
 Get timeout (millisecond) of each operation for in Table 
instance.
 
 
-
+
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorService
 getPool()
 The pool is used for mutli requests for this HTable
 
 
-
+
 int
 getReadRpcTimeout()
 Get timeout (millisecond) of each rpc read request in this 
Table instance.
 
 
-
+
 RegionLocator
 getRegionLocator()
 
-
+
 int
 getRpcTimeout()
 Deprecated.
 
 
-
+
 ResultScanner
 getScanner(byte[]family)
 The underlying HTable must not be 
closed.
 
 
-
+
 ResultScanner
 getScanner(byte[]family,
   byte[]qualifier)
 The underlying HTable must not be 
closed.
 
 
-
+
 ResultScanner
 getScanner(Scanscan)
 The underlying HTable must not be 
closed.
 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listbyte[]
 getStartKeysInRange(byte[]start,
byte[]end)
 
-
+
 HTableDescriptor
 getTableDescriptor()
 Gets the table descriptor for 
this table.
 
 
-
+
 long
 getWriteBufferSize()
 Returns the maximum size in bytes of the write buffer for 
this HTable.
 
 
-
+
 int
 getWriteRpcTimeout()
 Get timeout (millisecond) of each rpc write request in this 
Table instance.
 
 
-
+
 Result
 increment(Incrementincrement)
 Increments one or more columns within a single row.
 
 
-
+
 long
 incrementColumnValue(byte[]row,
 byte[]family,
@@ -638,7 +644,7 @@ implements See Table.incrementColumnValue(byte[],
 byte[], byte[], long, Durability)
 
 
-
+
 long
 incrementColumnValue(byte[]row,
 byte[]family,
@@ -648,13 +654,13 @@ implements Atomically increments a column value.
 
 
-
+
 void
 mutateRow(RowMutationsrm)
 Performs multiple mutations atomically on a single 
row.
 
 
-
+
 Rvoid
 

[36/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptor.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptor.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptor.html
index f6addc4..0b2d6a5 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptor.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptor.html
@@ -216,77 +216,85 @@
 208  byte[] getValue(byte[] key);
 209
 210  /**
-211   * @return Getter for fetching an 
unmodifiable map.
-212   */
-213  MapBytes, Bytes getValues();
-214
-215  /**
-216   * Check if the table has an attached 
co-processor represented by the name
-217   * className
-218   *
-219   * @param classNameToMatch - Class name 
of the co-processor
-220   * @return true of the table has a 
co-processor className
-221   */
-222  boolean hasCoprocessor(String 
classNameToMatch);
-223
-224  /**
-225   * Checks to see if this table contains 
the given column family
+211   * Getter for accessing the metadata 
associated with the key.
+212   *
+213   * @param key The key.
+214   * @return Null if no mapping for the 
key
+215   */
+216  String getValue(String key);
+217
+218  /**
+219   * @return Getter for fetching an 
unmodifiable map.
+220   */
+221  MapBytes, Bytes getValues();
+222
+223  /**
+224   * Check if the table has an attached 
co-processor represented by the name
+225   * className
 226   *
-227   * @param name Family name or column 
name.
-228   * @return true if the table contains 
the specified family name
+227   * @param classNameToMatch - Class name 
of the co-processor
+228   * @return true of the table has a 
co-processor className
 229   */
-230  boolean hasColumnFamily(final byte[] 
name);
+230  boolean hasCoprocessor(String 
classNameToMatch);
 231
 232  /**
-233   * @return true if the read-replicas 
memstore replication is enabled.
-234   */
-235  boolean 
hasRegionMemstoreReplication();
-236
-237  /**
-238   * @return true if there are at least 
one cf whose replication scope is
-239   * serial.
-240   */
-241  boolean hasSerialReplicationScope();
-242
-243  /**
-244   * Check if the compaction enable flag 
of the table is true. If flag is false
-245   * then no minor/major compactions will 
be done in real.
-246   *
-247   * @return true if table compaction 
enabled
+233   * Checks to see if this table contains 
the given column family
+234   *
+235   * @param name Family name or column 
name.
+236   * @return true if the table contains 
the specified family name
+237   */
+238  boolean hasColumnFamily(final byte[] 
name);
+239
+240  /**
+241   * @return true if the read-replicas 
memstore replication is enabled.
+242   */
+243  boolean 
hasRegionMemstoreReplication();
+244
+245  /**
+246   * @return true if there are at least 
one cf whose replication scope is
+247   * serial.
 248   */
-249  boolean isCompactionEnabled();
+249  boolean hasSerialReplicationScope();
 250
 251  /**
-252   * Checks if this table is code 
hbase:meta /code region.
-253   *
-254   * @return true if this table is 
code hbase:meta /code region
-255   */
-256  boolean isMetaRegion();
-257
-258  /**
-259   * Checks if the table is a 
codehbase:meta/code table
-260   *
-261   * @return true if table is 
code hbase:meta /code region.
-262   */
-263  boolean isMetaTable();
-264
-265  /**
-266   * Check if normalization enable flag 
of the table is true. If flag is false
-267   * then no region normalizer won't 
attempt to normalize this table.
+252   * Check if the compaction enable flag 
of the table is true. If flag is false
+253   * then no minor/major compactions will 
be done in real.
+254   *
+255   * @return true if table compaction 
enabled
+256   */
+257  boolean isCompactionEnabled();
+258
+259  /**
+260   * Checks if this table is code 
hbase:meta /code region.
+261   *
+262   * @return true if this table is 
code hbase:meta /code region
+263   */
+264  boolean isMetaRegion();
+265
+266  /**
+267   * Checks if the table is a 
codehbase:meta/code table
 268   *
-269   * @return true if region normalization 
is enabled for this table
+269   * @return true if table is 
code hbase:meta /code region.
 270   */
-271  boolean isNormalizationEnabled();
+271  boolean isMetaTable();
 272
 273  /**
-274   * Check if the readOnly flag of the 
table is set. If the readOnly flag is set
-275   * then the contents of the table can 
only be read from but not modified.
+274   * Check if normalization enable flag 
of the table is true. If flag is false
+275   * then no region normalizer won't 
attempt to normalize this table.
 276   *
-277   * @return true if all columns in the 
table should be read only
+277   * @return true if region normalization 
is enabled for this table
 278   */
-279  boolean isReadOnly();
+279  boolean isNormalizationEnabled();
 280
-281}
+281  /**

[35/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
index aed7651..8681390 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/client/TableDescriptorBuilder.html
@@ -508,971 +508,978 @@
 500
 501@Override
 502public Bytes getValue(Bytes key) {
-503  return values.get(key);
-504}
-505
-506@Override
-507public byte[] getValue(byte[] key) 
{
-508  Bytes value = values.get(new 
Bytes(key));
-509  return value == null ? null : 
value.get();
-510}
-511
-512private T T 
getOrDefault(Bytes key, FunctionString, T function, T defaultValue) {
-513  Bytes value = values.get(key);
-514  if (value == null) {
-515return defaultValue;
-516  } else {
-517return 
function.apply(Bytes.toString(value.get(), value.getOffset(), 
value.getLength()));
-518  }
-519}
-520
-521/**
-522 * Getter for fetching an 
unmodifiable {@link #values} map.
-523 *
-524 * @return unmodifiable map {@link 
#values}.
-525 * @see #values
-526 */
-527@Override
-528public MapBytes, Bytes 
getValues() {
-529  // shallow pointer copy
-530  return 
Collections.unmodifiableMap(values);
-531}
-532
-533/**
-534 * Setter for storing metadata as a 
(key, value) pair in {@link #values} map
-535 *
-536 * @param key The key.
-537 * @param value The value. If null, 
removes the setting.
-538 * @return the modifyable TD
-539 * @see #values
-540 */
-541public ModifyableTableDescriptor 
setValue(byte[] key, byte[] value) {
-542  return setValue(toBytesOrNull(key, 
v - v),
-543  toBytesOrNull(value, v 
- v));
-544}
-545
-546/*
-547 * @param key The key.
-548 * @param value The value. If null, 
removes the setting.
-549 */
-550private ModifyableTableDescriptor 
setValue(final Bytes key,
-551final String value) {
-552  return setValue(key, 
toBytesOrNull(value, Bytes::toBytes));
-553}
-554
-555/*
-556 * Setter for storing metadata as a 
(key, value) pair in {@link #values} map
-557 *
-558 * @param key The key.
-559 * @param value The value. If null, 
removes the setting.
-560 */
-561public ModifyableTableDescriptor 
setValue(final Bytes key, final Bytes value) {
-562  if (value == null) {
-563values.remove(key);
-564  } else {
-565values.put(key, value);
-566  }
-567  return this;
-568}
-569
-570private static T Bytes 
toBytesOrNull(T t, FunctionT, byte[] f) {
-571  if (t == null) {
-572return null;
-573  } else {
-574return new Bytes(f.apply(t));
-575  }
-576}
-577
-578/**
-579 * Remove metadata represented by the 
key from the {@link #values} map
-580 *
-581 * @param key Key whose key and value 
we're to remove from TableDescriptor
-582 * parameters.
-583 * @return the modifyable TD
-584 */
-585public ModifyableTableDescriptor 
removeValue(Bytes key) {
-586  return setValue(key, (Bytes) 
null);
-587}
-588
-589/**
-590 * Remove metadata represented by the 
key from the {@link #values} map
-591 *
-592 * @param key Key whose key and value 
we're to remove from TableDescriptor
-593 * parameters.
-594 * @return the modifyable TD
-595 */
-596public ModifyableTableDescriptor 
removeValue(final byte[] key) {
-597  return removeValue(new 
Bytes(key));
-598}
-599
-600/**
-601 * Check if the readOnly flag of the 
table is set. If the readOnly flag is
-602 * set then the contents of the table 
can only be read from but not
-603 * modified.
-604 *
-605 * @return true if all columns in the 
table should be read only
-606 */
-607@Override
-608public boolean isReadOnly() {
-609  return getOrDefault(READONLY_KEY, 
Boolean::valueOf, DEFAULT_READONLY);
-610}
-611
-612/**
-613 * Setting the table as read only 
sets all the columns in the table as read
-614 * only. By default all tables are 
modifiable, but if the readOnly flag is
-615 * set to true then the contents of 
the table can only be read but not
-616 * modified.
-617 *
-618 * @param readOnly True if all of the 
columns in the table should be read
-619 * only.
-620 * @return the modifyable TD
-621 */
-622public ModifyableTableDescriptor 
setReadOnly(final boolean readOnly) {
-623  return setValue(READONLY_KEY, 
Boolean.toString(readOnly));
-624}
-625
-626/**
-627 * Check if the compaction enable 
flag of the table is true. If flag 

[20/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
index cd53b56..5f8eff1 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
@@ -593,10 +593,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private HTableDescriptor
-HBaseAdmin.CreateTableFuture.desc
-
-
-private HTableDescriptor
 TableSnapshotScanner.htd
 
 
@@ -611,7 +607,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 HTableDescriptor[]
 Admin.deleteTables(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
-Delete tables matching the passed in pattern and wait on 
completion.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.util.regex.Pattern)
+ and Admin.enableTable(org.apache.hadoop.hbase.TableName)
+
 
 
 
@@ -623,7 +624,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 HTableDescriptor[]
 Admin.deleteTables(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex)
-Deletes tables matching the passed in pattern and wait on 
completion.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.lang.String)
+ and Admin.enableTable(org.apache.hadoop.hbase.TableName)
+
 
 
 
@@ -633,7 +639,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 HTableDescriptor[]
 Admin.disableTables(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
-Disable tables matching the passed in pattern and wait on 
completion.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.util.regex.Pattern)
+ and Admin.disableTable(org.apache.hadoop.hbase.TableName)
+
 
 
 
@@ -643,7 +654,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 HTableDescriptor[]
 Admin.disableTables(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex)
-Disable tables matching the passed in pattern and wait on 
completion.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.lang.String)
+ and Admin.disableTable(org.apache.hadoop.hbase.TableName)
+
 
 
 
@@ -653,7 +669,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 HTableDescriptor[]
 Admin.enableTables(http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
-Enable tables matching the passed in pattern and wait on 
completion.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.util.regex.Pattern)
+ and Admin.enableTable(org.apache.hadoop.hbase.TableName)
+
 
 
 
@@ -663,7 +684,12 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 HTableDescriptor[]
 Admin.enableTables(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex)
-Enable tables matching the passed in pattern and wait on 
completion.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version
+ This is just a trivial helper method without any magic.
+ Consider using Admin.listTableDescriptors(java.lang.String)
+ and Admin.enableTable(org.apache.hadoop.hbase.TableName)
+
 
 
 
@@ -671,48 +697,52 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 HBaseAdmin.enableTables(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">Stringregex)
 
 
+(package private) static HTableDescriptor
+HBaseAdmin.getHTableDescriptor(TableNametableName,
+  

[23/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/HTableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HTableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/HTableDescriptor.html
index 6831fb2..fe98c7c 100644
--- a/devapidocs/org/apache/hadoop/hbase/HTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/HTableDescriptor.html
@@ -597,7 +597,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 getValue(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey)
 Deprecated.
-Getter for accessing the metadata associated with the 
key
+Getter for accessing the metadata associated with the 
key.
 
 
 
@@ -1228,7 +1228,7 @@ implements 
 
 NAMESPACE_FAMILY_INFO
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String NAMESPACE_FAMILY_INFO
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String NAMESPACE_FAMILY_INFO
 Deprecated.
 
 See Also:
@@ -1242,7 +1242,7 @@ implements 
 
 NAMESPACE_FAMILY_INFO_BYTES
-public static finalbyte[] NAMESPACE_FAMILY_INFO_BYTES
+public static finalbyte[] NAMESPACE_FAMILY_INFO_BYTES
 Deprecated.
 
 
@@ -1252,7 +1252,7 @@ implements 
 
 NAMESPACE_COL_DESC_BYTES
-public static finalbyte[] NAMESPACE_COL_DESC_BYTES
+public static finalbyte[] NAMESPACE_COL_DESC_BYTES
 Deprecated.
 
 
@@ -1262,7 +1262,7 @@ implements 
 
 NAMESPACE_TABLEDESC
-public static finalHTableDescriptor NAMESPACE_TABLEDESC
+public static finalHTableDescriptor NAMESPACE_TABLEDESC
 Deprecated.
 Table descriptor for namespace table
 
@@ -1421,30 +1421,13 @@ implements 
-
-
-
-
-getValue
-publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetValue(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey)
-Deprecated.
-Getter for accessing the metadata associated with the 
key
-
-Parameters:
-key - The key.
-Returns:
-The value.
-
-
-
 
 
 
 
 
 getValues
-publichttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,BytesgetValues()
+publichttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">MapBytes,BytesgetValues()
 Deprecated.
 
 Specified by:
@@ -1460,7 +1443,7 @@ implements 
 
 setValue
-publicHTableDescriptorsetValue(byte[]key,
+publicHTableDescriptorsetValue(byte[]key,
  byte[]value)
 Deprecated.
 Setter for storing metadata as a (key, value) pair in 
map
@@ -1477,7 +1460,7 @@ implements 
 
 setValue
-publicHTableDescriptorsetValue(Byteskey,
+publicHTableDescriptorsetValue(Byteskey,
  Bytesvalue)
 Deprecated.
 
@@ -1488,7 +1471,7 @@ implements 
 
 setValue
-publicHTableDescriptorsetValue(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey,
+publicHTableDescriptorsetValue(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey,
  http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringvalue)
 Deprecated.
 Setter for storing metadata as a (key, value) pair in 
map
@@ -1505,7 +1488,7 @@ implements 
 
 remove
-publicvoidremove(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey)
+publicvoidremove(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringkey)
 Deprecated.
 Remove metadata represented by the key from the map
 
@@ -1521,7 +1504,7 @@ implements 
 
 remove
-publicvoidremove(Byteskey)
+publicvoidremove(Byteskey)
 Deprecated.
 Remove metadata represented by the key from the map
 
@@ -1537,7 +1520,7 @@ implements 
 
 remove
-publicvoidremove(byte[]key)
+publicvoidremove(byte[]key)
 Deprecated.
 Remove metadata represented by the key from the map
 
@@ -1553,7 +1536,7 @@ implements 
 
 isReadOnly
-publicbooleanisReadOnly()
+publicbooleanisReadOnly()
 Deprecated.
 Check if the readOnly flag of the table is set. If the 
readOnly flag is
  set then the contents of the table can only be read from but not 
modified.
@@ -1571,7 +1554,7 @@ implements 
 
 setReadOnly
-publicHTableDescriptorsetReadOnly(booleanreadOnly)
+publicHTableDescriptorsetReadOnly(booleanreadOnly)
 

[31/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.html
index 1d5c1db..84104c6 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.html
@@ -71,732 +71,734 @@
 063import 
com.google.common.base.Preconditions;
 064import com.google.common.base.Splitter;
 065import com.google.common.collect.Lists;
-066
-067/**
-068 * Tool to import data from a TSV file.
-069 *
-070 * This tool is rather simplistic - it 
doesn't do any quoting or
-071 * escaping, but is useful for many data 
loads.
-072 *
-073 * @see ImportTsv#usage(String)
-074 */
-075@InterfaceAudience.Public
-076public class ImportTsv extends Configured 
implements Tool {
-077
-078  protected static final Log LOG = 
LogFactory.getLog(ImportTsv.class);
+066import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+067import 
org.apache.hadoop.hbase.client.TableDescriptor;
+068
+069/**
+070 * Tool to import data from a TSV file.
+071 *
+072 * This tool is rather simplistic - it 
doesn't do any quoting or
+073 * escaping, but is useful for many data 
loads.
+074 *
+075 * @see ImportTsv#usage(String)
+076 */
+077@InterfaceAudience.Public
+078public class ImportTsv extends Configured 
implements Tool {
 079
-080  final static String NAME = 
"importtsv";
+080  protected static final Log LOG = 
LogFactory.getLog(ImportTsv.class);
 081
-082  public final static String 
MAPPER_CONF_KEY = "importtsv.mapper.class";
-083  public final static String 
BULK_OUTPUT_CONF_KEY = "importtsv.bulk.output";
-084  public final static String 
TIMESTAMP_CONF_KEY = "importtsv.timestamp";
-085  public final static String 
JOB_NAME_CONF_KEY = "mapreduce.job.name";
-086  // TODO: the rest of these configs are 
used exclusively by TsvImporterMapper.
-087  // Move them out of the tool and let 
the mapper handle its own validation.
-088  public final static String 
DRY_RUN_CONF_KEY = "importtsv.dry.run";
-089  // If true, bad lines are logged to 
stderr. Default: false.
-090  public final static String 
LOG_BAD_LINES_CONF_KEY = "importtsv.log.bad.lines";
-091  public final static String 
SKIP_LINES_CONF_KEY = "importtsv.skip.bad.lines";
-092  public final static String 
SKIP_EMPTY_COLUMNS = "importtsv.skip.empty.columns";
-093  public final static String 
COLUMNS_CONF_KEY = "importtsv.columns";
-094  public final static String 
SEPARATOR_CONF_KEY = "importtsv.separator";
-095  public final static String 
ATTRIBUTE_SEPERATOR_CONF_KEY = "attributes.seperator";
-096  //This config is used to propagate 
credentials from parent MR jobs which launch
-097  //ImportTSV jobs. SEE 
IntegrationTestImportTsv.
-098  public final static String 
CREDENTIALS_LOCATION = "credentials_location";
-099  final static String DEFAULT_SEPARATOR = 
"\t";
-100  final static String 
DEFAULT_ATTRIBUTES_SEPERATOR = "=";
-101  final static String 
DEFAULT_MULTIPLE_ATTRIBUTES_SEPERATOR = ",";
-102  final static Class DEFAULT_MAPPER = 
TsvImporterMapper.class;
-103  public final static String 
CREATE_TABLE_CONF_KEY = "create.table";
-104  public final static String 
NO_STRICT_COL_FAMILY = "no.strict";
-105  /**
-106   * If table didn't exist and was 
created in dry-run mode, this flag is
-107   * flipped to delete it when MR ends.
-108   */
-109  private static boolean 
DRY_RUN_TABLE_CREATED;
-110
-111  public static class TsvParser {
-112/**
-113 * Column families and qualifiers 
mapped to the TSV columns
-114 */
-115private final byte[][] families;
-116private final byte[][] qualifiers;
-117
-118private final byte separatorByte;
+082  final static String NAME = 
"importtsv";
+083
+084  public final static String 
MAPPER_CONF_KEY = "importtsv.mapper.class";
+085  public final static String 
BULK_OUTPUT_CONF_KEY = "importtsv.bulk.output";
+086  public final static String 
TIMESTAMP_CONF_KEY = "importtsv.timestamp";
+087  public final static String 
JOB_NAME_CONF_KEY = "mapreduce.job.name";
+088  // TODO: the rest of these configs are 
used exclusively by TsvImporterMapper.
+089  // Move them out of the tool and let 
the mapper handle its own validation.
+090  public final static String 
DRY_RUN_CONF_KEY = "importtsv.dry.run";
+091  // If true, bad lines are logged to 
stderr. Default: false.
+092  public final static String 
LOG_BAD_LINES_CONF_KEY = "importtsv.log.bad.lines";
+093  public final static String 
SKIP_LINES_CONF_KEY = "importtsv.skip.bad.lines";
+094  public final static String 
SKIP_EMPTY_COLUMNS = "importtsv.skip.empty.columns";
+095  public final static String 
COLUMNS_CONF_KEY = "importtsv.columns";
+096  public final static String 
SEPARATOR_CONF_KEY = "importtsv.separator";
+097  public final 

[46/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Admin.html 
b/apidocs/org/apache/hadoop/hbase/client/Admin.html
index 364913f..737f8a5 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Admin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Admin.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":18,"i7":18,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":38,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":18,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":18,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":6,"i69":38,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":18,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":18,"i112":18,"i113":18,"i114":6,"i115":6,"i116":6,"i117":6,"i118":6,"i119":6,"
 
i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":6,"i130":6,"i131":6,"i132":6,"i133":6,"i134":6,"i135":6,"i136":6,"i137":38,"i138":6,"i139":6,"i140":38,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":6,"i148":6,"i149":6,"i150":6,"i151":18,"i152":18,"i153":6,"i154":6,"i155":6,"i156":6,"i157":6,"i158":6,"i159":6,"i160":6,"i161":6,"i162":6,"i163":6,"i164":6,"i165":6,"i166":6,"i167":6,"i168":6,"i169":6,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":6,"i180":6,"i181":6,"i182":6,"i183":6,"i184":6,"i185":18};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":38,"i4":6,"i5":6,"i6":18,"i7":18,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":50,"i33":50,"i34":50,"i35":6,"i36":6,"i37":6,"i38":50,"i39":6,"i40":38,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":38,"i52":38,"i53":6,"i54":6,"i55":18,"i56":6,"i57":6,"i58":6,"i59":38,"i60":38,"i61":6,"i62":6,"i63":18,"i64":6,"i65":6,"i66":6,"i67":38,"i68":38,"i69":6,"i70":6,"i71":6,"i72":6,"i73":38,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":18,"i92":6,"i93":38,"i94":38,"i95":38,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6,"i108":6,"i109":6,"i110":6,"i111":6,"i112":6,"i113":6,"i114":6,"i115":18,"i116":18,"i117":18,"i118"
 
:6,"i119":6,"i120":6,"i121":6,"i122":6,"i123":6,"i124":6,"i125":6,"i126":6,"i127":6,"i128":6,"i129":38,"i130":6,"i131":6,"i132":6,"i133":6,"i134":6,"i135":6,"i136":38,"i137":38,"i138":38,"i139":38,"i140":38,"i141":6,"i142":6,"i143":6,"i144":6,"i145":6,"i146":6,"i147":6,"i148":6,"i149":38,"i150":6,"i151":6,"i152":38,"i153":6,"i154":6,"i155":6,"i156":6,"i157":6,"i158":38,"i159":6,"i160":38,"i161":6,"i162":6,"i163":6,"i164":6,"i165":18,"i166":18,"i167":6,"i168":6,"i169":6,"i170":6,"i171":6,"i172":6,"i173":6,"i174":6,"i175":6,"i176":6,"i177":6,"i178":6,"i179":6,"i180":6,"i181":6,"i182":6,"i183":6,"i184":6,"i185":6,"i186":6,"i187":6,"i188":6,"i189":6,"i190":6,"i191":6,"i192":6,"i193":6,"i194":6,"i195":6,"i196":6,"i197":6,"i198":6,"i199":18};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"],16:["t5","Default 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -359,35 +359,76 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 
-void
+default void
 createTable(HTableDescriptordesc)
-Creates a new table.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use createTable(TableDescriptor)
+
 
 
 
-void
+default void
 createTable(HTableDescriptordesc,
byte[][]splitKeys)
-Creates a new table with an initial set of empty regions 
defined by the specified split keys.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use createTable(TableDescriptor,
 byte[][])
+
 
 
 
-void
+default void
 createTable(HTableDescriptordesc,
byte[]startKey,
byte[]endKey,
intnumRegions)
-Creates a new table with the specified number of 
regions.
+Deprecated.
+since 2.0 version and will 
be removed in 3.0 version.
+ use createTable(TableDescriptor,
 byte[], 

[22/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
index 9b3e33f..ff07afc 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
@@ -122,19 +122,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-org.apache.hadoop.hbase.regionserver
-
-
-
 org.apache.hadoop.hbase.replication
 
 Multi Cluster Replication
 
 
-
-org.apache.hadoop.hbase.security
-
-
 
 org.apache.hadoop.hbase.security.access
 
@@ -150,10 +142,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 service.
 
 
-
-org.apache.hadoop.hbase.tool
-
-
 
 
 
@@ -823,39 +811,6 @@ service.
 
 
 Uses of HColumnDescriptor in org.apache.hadoop.hbase.mapreduce
-
-Fields in org.apache.hadoop.hbase.mapreduce
 with type parameters of type HColumnDescriptor
-
-Modifier and Type
-Field and Description
-
-
-
-(package private) static http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in java.util.function">FunctionHColumnDescriptor,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-HFileOutputFormat2.blockSizeDetails
-Serialize column family to block size map to 
configuration.
-
-
-
-(package private) static http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in java.util.function">FunctionHColumnDescriptor,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-HFileOutputFormat2.bloomTypeDetails
-Serialize column family to bloom type map to 
configuration.
-
-
-
-(package private) static http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in java.util.function">FunctionHColumnDescriptor,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-HFileOutputFormat2.compressionDetails
-Serialize column family to compression algorithm map to 
configuration.
-
-
-
-(package private) static http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in java.util.function">FunctionHColumnDescriptor,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-HFileOutputFormat2.dataBlockEncodingDetails
-Serialize column family to data block encoding map to 
configuration.
-
-
-
-
 
 Methods in org.apache.hadoop.hbase.mapreduce
 with parameters of type HColumnDescriptor
 
@@ -887,20 +842,6 @@ service.
 
 
 
-
-Method parameters in org.apache.hadoop.hbase.mapreduce
 with type arguments of type HColumnDescriptor
-
-Modifier and Type
-Method and Description
-
-
-
-(package private) static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-HFileOutputFormat2.serializeColumnFamilyAttribute(http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true;
 title="class or interface in java.util.function">FunctionHColumnDescriptor,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringfn,
-  http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHTableDescriptorallTables)
-
-
-
 
 
 
@@ -1186,24 +1127,6 @@ service.
 
 
 static StoreFileWriter
-MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf,
-org.apache.hadoop.fs.FileSystemfs,
-HColumnDescriptorfamily,
-org.apache.hadoop.fs.Pathpath,
-longmaxKeyCount,
-Compression.Algorithmcompression,
-CacheConfigcacheConfig,
-Encryption.ContextcryptoContext,
-ChecksumTypechecksumType,
-intbytesPerChecksum,
-intblocksize,
-BloomTypebloomType,
-booleanisCompaction)
-Creates a writer for the mob file in temp directory.
-
-
-
-static StoreFileWriter
 MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf,
 org.apache.hadoop.fs.FileSystemfs,
 HColumnDescriptorfamily,
@@ -1218,7 +1141,7 @@ service.
 Creates a writer for the mob file in temp directory.
 
 
-
+
 static StoreFileWriter
 MobUtils.createWriter(org.apache.hadoop.conf.Configurationconf,
 

[39/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
index 75f0173..2fdac6a 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
@@ -106,1963 +106,2221 @@
 098   *
 099   * @return - returns an array of 
read-only HTableDescriptors
 100   * @throws IOException if a remote or 
network exception occurs
-101   */
-102  HTableDescriptor[] listTables() throws 
IOException;
-103
-104  /**
-105   * List all the userspace tables 
matching the given pattern.
-106   *
-107   * @param pattern The compiled regular 
expression to match against
-108   * @return - returns an array of 
read-only HTableDescriptors
-109   * @throws IOException if a remote or 
network exception occurs
-110   * @see #listTables()
-111   */
-112  HTableDescriptor[] listTables(Pattern 
pattern) throws IOException;
-113
-114  /**
-115   * List all the userspace tables 
matching the given regular expression.
-116   *
-117   * @param regex The regular expression 
to match against
-118   * @return - returns an array of 
HTableDescriptors
-119   * @throws IOException if a remote or 
network exception occurs
-120   * @see 
#listTables(java.util.regex.Pattern)
-121   */
-122  HTableDescriptor[] listTables(String 
regex) throws IOException;
-123
-124  /**
-125   * List all the tables matching the 
given pattern.
-126   *
-127   * @param pattern The compiled regular 
expression to match against
-128   * @param includeSysTables False to 
match only against userspace tables
-129   * @return - returns an array of 
read-only HTableDescriptors
-130   * @throws IOException if a remote or 
network exception occurs
-131   * @see #listTables()
-132   */
-133  HTableDescriptor[] listTables(Pattern 
pattern, boolean includeSysTables)
-134  throws IOException;
-135
-136  /**
-137   * List all the tables matching the 
given pattern.
-138   *
-139   * @param regex The regular expression 
to match against
-140   * @param includeSysTables False to 
match only against userspace tables
-141   * @return - returns an array of 
read-only HTableDescriptors
-142   * @throws IOException if a remote or 
network exception occurs
-143   * @see 
#listTables(java.util.regex.Pattern, boolean)
-144   */
-145  HTableDescriptor[] listTables(String 
regex, boolean includeSysTables)
-146  throws IOException;
-147
-148  /**
-149   * List all of the names of userspace 
tables.
-150   *
-151   * @return TableName[] table names
-152   * @throws IOException if a remote or 
network exception occurs
-153   */
-154  TableName[] listTableNames() throws 
IOException;
-155
-156  /**
-157   * List all of the names of userspace 
tables.
-158   * @param pattern The regular 
expression to match against
-159   * @return TableName[] table names
-160   * @throws IOException if a remote or 
network exception occurs
-161   */
-162  TableName[] listTableNames(Pattern 
pattern) throws IOException;
-163
-164  /**
-165   * List all of the names of userspace 
tables.
-166   * @param regex The regular expression 
to match against
-167   * @return TableName[] table names
-168   * @throws IOException if a remote or 
network exception occurs
-169   */
-170  TableName[] listTableNames(String 
regex) throws IOException;
-171
-172  /**
-173   * List all of the names of userspace 
tables.
-174   * @param pattern The regular 
expression to match against
-175   * @param includeSysTables False to 
match only against userspace tables
-176   * @return TableName[] table names
-177   * @throws IOException if a remote or 
network exception occurs
-178   */
-179  TableName[] listTableNames(final 
Pattern pattern, final boolean includeSysTables)
-180  throws IOException;
-181
-182  /**
-183   * List all of the names of userspace 
tables.
-184   * @param regex The regular expression 
to match against
-185   * @param includeSysTables False to 
match only against userspace tables
-186   * @return TableName[] table names
-187   * @throws IOException if a remote or 
network exception occurs
-188   */
-189  TableName[] listTableNames(final String 
regex, final boolean includeSysTables)
-190  throws IOException;
-191
-192  /**
-193   * Method for getting the 
tableDescriptor
-194   *
-195   * @param tableName as a {@link 
TableName}
-196   * @return the read-only 
tableDescriptor
-197   * @throws 
org.apache.hadoop.hbase.TableNotFoundException
-198   * @throws IOException if a remote or 
network exception occurs
-199   */
-200  HTableDescriptor 
getTableDescriptor(final TableName tableName)
-201  throws TableNotFoundException, 
IOException;
+101   * @deprecated since 2.0 version and 
will be removed in 3.0 version.
+102   * use {@link 

[30/51] [partial] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-07-08 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/9fb0764b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
index e9af038..53cae9a 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
@@ -78,1244 +78,1245 @@
 070import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 071import 
org.apache.hadoop.hbase.client.Admin;
 072import 
org.apache.hadoop.hbase.client.ClientServiceCallable;
-073import 
org.apache.hadoop.hbase.client.Connection;
-074import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-075import 
org.apache.hadoop.hbase.client.RegionLocator;
-076import 
org.apache.hadoop.hbase.client.RpcRetryingCallerFactory;
-077import 
org.apache.hadoop.hbase.client.SecureBulkLoadClient;
-078import 
org.apache.hadoop.hbase.client.Table;
-079import 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-080import 
org.apache.hadoop.hbase.io.HFileLink;
-081import 
org.apache.hadoop.hbase.io.HalfStoreFileReader;
-082import 
org.apache.hadoop.hbase.io.Reference;
-083import 
org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
-084import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-085import 
org.apache.hadoop.hbase.io.hfile.HFile;
-086import 
org.apache.hadoop.hbase.io.hfile.HFileContext;
-087import 
org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
-088import 
org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
-089import 
org.apache.hadoop.hbase.io.hfile.HFileScanner;
-090import 
org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-091import 
org.apache.hadoop.hbase.regionserver.BloomType;
-092import 
org.apache.hadoop.hbase.regionserver.HStore;
-093import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-094import 
org.apache.hadoop.hbase.regionserver.StoreFileWriter;
-095import 
org.apache.hadoop.hbase.security.UserProvider;
-096import 
org.apache.hadoop.hbase.security.token.FsDelegationToken;
-097import 
org.apache.hadoop.hbase.util.Bytes;
-098import 
org.apache.hadoop.hbase.util.FSHDFSUtils;
-099import 
org.apache.hadoop.hbase.util.Pair;
-100import org.apache.hadoop.util.Tool;
-101import 
org.apache.hadoop.util.ToolRunner;
-102
-103/**
-104 * Tool to load the output of 
HFileOutputFormat into an existing table.
-105 */
-106@InterfaceAudience.Public
-107public class LoadIncrementalHFiles 
extends Configured implements Tool {
-108  private static final Log LOG = 
LogFactory.getLog(LoadIncrementalHFiles.class);
-109  private boolean initalized = false;
-110
-111  public static final String NAME = 
"completebulkload";
-112  static final String 
RETRY_ON_IO_EXCEPTION = "hbase.bulkload.retries.retryOnIOException";
-113  public static final String 
MAX_FILES_PER_REGION_PER_FAMILY
-114= 
"hbase.mapreduce.bulkload.max.hfiles.perRegion.perFamily";
-115  private static final String 
ASSIGN_SEQ_IDS = "hbase.mapreduce.bulkload.assign.sequenceNumbers";
-116  public final static String 
CREATE_TABLE_CONF_KEY = "create.table";
-117  public final static String 
IGNORE_UNMATCHED_CF_CONF_KEY = "ignore.unmatched.families";
-118  public final static String 
ALWAYS_COPY_FILES = "always.copy.files";
-119
-120  // We use a '.' prefix which is ignored 
when walking directory trees
-121  // above. It is invalid family name.
-122  final static String TMP_DIR = ".tmp";
-123
-124  private int 
maxFilesPerRegionPerFamily;
-125  private boolean assignSeqIds;
-126  private SetString 
unmatchedFamilies = new HashSet();
-127
-128  // Source filesystem
-129  private FileSystem fs;
-130  // Source delegation token
-131  private FsDelegationToken 
fsDelegationToken;
-132  private String bulkToken;
-133  private UserProvider userProvider;
-134  private int nrThreads;
-135  private RpcControllerFactory 
rpcControllerFactory;
-136  private AtomicInteger numRetries;
-137
-138  private MapLoadQueueItem, 
ByteBuffer retValue = null;
-139
-140  public 
LoadIncrementalHFiles(Configuration conf) throws Exception {
-141super(conf);
-142this.rpcControllerFactory = new 
RpcControllerFactory(conf);
-143initialize();
-144  }
-145
-146  private void initialize() throws 
IOException {
-147if (initalized) {
-148  return;
-149}
-150// make a copy, just to be sure we're 
not overriding someone else's config
-151
setConf(HBaseConfiguration.create(getConf()));
-152Configuration conf = getConf();
-153// disable blockcache for tool 
invocation, see HBASE-10500
-154
conf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0);
-155this.userProvider = 
UserProvider.instantiate(conf);
-156this.fsDelegationToken = new 
FsDelegationToken(userProvider, "renewer");
-157assignSeqIds = 

[4/4] hbase git commit: HBASE-18241 Change client.Table, client.Admin, Region, Store, and HBaseTestingUtility to not use HTableDescriptor or HColumnDescriptor

2017-07-08 Thread chia7712
HBASE-18241 Change client.Table, client.Admin, Region, Store, and 
HBaseTestingUtility to not use HTableDescriptor or HColumnDescriptor


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/53ec9c5b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/53ec9c5b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/53ec9c5b

Branch: refs/heads/branch-2
Commit: 53ec9c5bd35ee4814d1ec742b0ca5d649d5fc30e
Parents: c05a408
Author: Chia-Ping Tsai 
Authored: Sat Jul 8 17:00:15 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Jul 8 17:00:15 2017 +0800

--
 .../apache/hadoop/hbase/HTableDescriptor.java   |  16 +-
 .../org/apache/hadoop/hbase/client/Admin.java   | 280 -
 .../apache/hadoop/hbase/client/HBaseAdmin.java  | 144 -
 .../org/apache/hadoop/hbase/client/HTable.java  |  12 +-
 .../org/apache/hadoop/hbase/client/Table.java   |  11 +-
 .../hadoop/hbase/client/TableDescriptor.java|   8 +
 .../hbase/client/TableDescriptorBuilder.java|  11 +-
 .../hadoop/hbase/security/EncryptionUtil.java   |   3 +-
 .../hbase/shaded/protobuf/RequestConverter.java |   6 +-
 .../example/ZooKeeperScanPolicyObserver.java|   2 +-
 .../hadoop/hbase/rest/client/RemoteHTable.java  |   6 +
 .../backup/mapreduce/HFileSplitterJob.java  |   2 +-
 .../hadoop/hbase/client/HTableWrapper.java  |   5 +
 .../hbase/constraint/ConstraintProcessor.java   |   3 +-
 .../hadoop/hbase/constraint/Constraints.java|   3 +-
 .../hadoop/hbase/mapreduce/CopyTable.java   |   2 +-
 .../hbase/mapreduce/HFileOutputFormat2.java |  68 +++--
 .../apache/hadoop/hbase/mapreduce/Import.java   |   8 +-
 .../hadoop/hbase/mapreduce/ImportTsv.java   |  10 +-
 .../hbase/mapreduce/LoadIncrementalHFiles.java  |   7 +-
 .../mapreduce/MultiTableHFileOutputFormat.java  |   5 +-
 .../hadoop/hbase/mapreduce/WALPlayer.java   |   2 +-
 .../hbase/mob/DefaultMobStoreCompactor.java |   6 +-
 .../hbase/mob/DefaultMobStoreFlusher.java   |   6 +-
 .../apache/hadoop/hbase/mob/MobCacheConfig.java |   3 +-
 .../org/apache/hadoop/hbase/mob/MobUtils.java   |   3 +-
 .../hbase/quotas/ActivePolicyEnforcement.java   |   2 +-
 .../quotas/RegionServerRpcQuotaManager.java |   2 +-
 .../hadoop/hbase/regionserver/CompactSplit.java |   6 +-
 .../hbase/regionserver/CompactingMemStore.java  |   2 +-
 .../ConstantSizeRegionSplitPolicy.java  |   3 +-
 .../hbase/regionserver/DefaultStoreFlusher.java |   2 +-
 .../DelimitedKeyPrefixRegionSplitPolicy.java|   4 +-
 .../regionserver/FlushAllLargeStoresPolicy.java |   4 +-
 .../regionserver/FlushLargeStoresPolicy.java|   8 +-
 .../hbase/regionserver/FlushPolicyFactory.java  |   5 +-
 .../hadoop/hbase/regionserver/HMobStore.java|   9 +-
 .../hadoop/hbase/regionserver/HRegion.java  | 136 +
 .../hbase/regionserver/HRegionServer.java   |   6 +-
 .../hadoop/hbase/regionserver/HStore.java   |  21 +-
 ...IncreasingToUpperBoundRegionSplitPolicy.java |   9 +-
 .../KeyPrefixRegionSplitPolicy.java |  10 +-
 .../hbase/regionserver/MemStoreFlusher.java |   2 +-
 .../regionserver/MetricsRegionWrapperImpl.java  |   6 +-
 .../MetricsTableWrapperAggregateImpl.java   |   2 +-
 .../hbase/regionserver/RSDumpServlet.java   |   2 +-
 .../hbase/regionserver/RSRpcServices.java   |   6 +-
 .../hadoop/hbase/regionserver/Region.java   |   3 +-
 .../regionserver/RegionCoprocessorHost.java |  11 +-
 .../regionserver/RegionServicesForStores.java   |   2 +-
 .../hbase/regionserver/RegionSplitPolicy.java   |   7 +-
 .../hadoop/hbase/regionserver/ScanInfo.java |   6 +-
 .../regionserver/SecureBulkLoadManager.java |   2 +-
 .../apache/hadoop/hbase/regionserver/Store.java |   4 +-
 .../hbase/regionserver/StoreFileScanner.java|   2 +-
 .../hbase/regionserver/StripeStoreFlusher.java  |   2 +-
 .../regionserver/compactions/Compactor.java |  12 +-
 .../throttle/ThroughputControlUtil.java |   3 +-
 .../security/access/AccessControlLists.java |   2 +-
 .../hbase/security/access/AccessController.java |  15 +-
 .../security/visibility/VisibilityUtils.java|   3 +-
 .../hadoop/hbase/snapshot/SnapshotManifest.java |   2 +-
 .../org/apache/hadoop/hbase/tool/Canary.java|  30 +-
 .../hadoop/hbase/util/RegionSplitter.java   |  18 +-
 .../hbase/util/ServerRegionReplicaUtil.java |   2 +-
 .../hadoop/hbase/HBaseTestingUtility.java   | 298 ---
 .../apache/hadoop/hbase/MiniHBaseCluster.java   |   8 +-
 .../org/apache/hadoop/hbase/TestIOFencing.java  |  12 +-
 .../hadoop/hbase/backup/TestHFileArchiving.java |   4 +-
 .../hbase/backup/TestIncrementalBackup.java |   2 +-
 .../apache/hadoop/hbase/client/TestAdmin1.java  |   4 +-
 ...estAvoidCellReferencesIntoShippedBlocks.java |   2 +-
 

[1/4] hbase git commit: HBASE-18241 Change client.Table, client.Admin, Region, Store, and HBaseTestingUtility to not use HTableDescriptor or HColumnDescriptor

2017-07-08 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 c05a40809 -> 53ec9c5bd


http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
index 155c6b6..21089ed 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
@@ -45,11 +45,15 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
@@ -171,7 +175,7 @@ public class TestMobStoreCompaction {
 assertEquals("Before compaction: number of mob cells", compactionThreshold,
 countMobCellsInMetadata());
 // Change the threshold larger than the data size
-region.getTableDesc().getFamily(COLUMN_FAMILY).setMobThreshold(500);
+setMobThreshold(region, COLUMN_FAMILY, 500);
 region.initialize();
 region.compactStores();
 
@@ -182,6 +186,20 @@ public class TestMobStoreCompaction {
 assertEquals("After compaction: mob rows", 0, countMobRows());
   }
 
+  private static HRegion setMobThreshold(HRegion region, byte[] cfName, long 
modThreshold) {
+ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder
+.newBuilder(region.getTableDescriptor().getColumnFamily(cfName))
+.setMobThreshold(modThreshold)
+.build();
+TableDescriptor td = TableDescriptorBuilder
+.newBuilder(region.getTableDescriptor())
+.removeColumnFamily(cfName)
+.addColumnFamily(cfd)
+.build();
+region.setTableDescriptor(td);
+return region;
+  }
+
   /**
* This test will first generate store files, then bulk load them and 
trigger the compaction.
* When compaction, the cell value will be larger than the threshold.

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java
index 784c079..81dd630 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerAbort.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
@@ -191,7 +192,7 @@ public class TestRegionServerAbort {
*/
   public static class ErrorThrowingHRegion extends HRegion {
 public ErrorThrowingHRegion(Path tableDir, WAL wal, FileSystem fs, 
Configuration confParam,
-HRegionInfo regionInfo, HTableDescriptor htd,
+HRegionInfo regionInfo, TableDescriptor htd,
 RegionServerServices rsServices) {
   super(tableDir, wal, fs, confParam, regionInfo, htd, rsServices);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index 3c03827..f08fba0 100644
--- 

[3/4] hbase git commit: HBASE-18241 Change client.Table, client.Admin, Region, Store, and HBaseTestingUtility to not use HTableDescriptor or HColumnDescriptor

2017-07-08 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
index e7157d0..5d3b50b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
@@ -332,7 +332,7 @@ public class CompactSplit implements CompactionRequestor, 
PropagatingConfigurati
   final String why, int priority, CompactionRequest request, boolean 
selectNow, User user)
   throws IOException {
 if (this.server.isStopped()
-|| (r.getTableDesc() != null && 
!r.getTableDesc().isCompactionEnabled())) {
+|| (r.getTableDescriptor() != null && 
!r.getTableDescriptor().isCompactionEnabled())) {
   return null;
 }
 
@@ -345,7 +345,7 @@ public class CompactSplit implements CompactionRequestor, 
PropagatingConfigurati
 final RegionServerSpaceQuotaManager spaceQuotaManager =
   this.server.getRegionServerSpaceQuotaManager();
 if (spaceQuotaManager != null && spaceQuotaManager.areCompactionsDisabled(
-r.getTableDesc().getTableName())) {
+r.getTableDescriptor().getTableName())) {
   if (LOG.isDebugEnabled()) {
 LOG.debug("Ignoring compaction request for " + r + " as an active 
space quota violation "
 + " policy disallows compactions.");
@@ -562,7 +562,7 @@ public class CompactSplit implements CompactionRequestor, 
PropagatingConfigurati
 public void run() {
   Preconditions.checkNotNull(server);
   if (server.isStopped()
-  || (region.getTableDesc() != null && 
!region.getTableDesc().isCompactionEnabled())) {
+  || (region.getTableDescriptor() != null && 
!region.getTableDescriptor().isCompactionEnabled())) {
 return;
   }
   doCompaction(user);

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
index 5b9372a..f07009c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactingMemStore.java
@@ -408,7 +408,7 @@ public class CompactingMemStore extends AbstractMemStore {
   }
 
   private byte[] getFamilyNameInBytes() {
-return store.getFamily().getName();
+return store.getColumnFamilyDescriptor().getName();
   }
 
   private ThreadPoolExecutor getPool() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ConstantSizeRegionSplitPolicy.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ConstantSizeRegionSplitPolicy.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ConstantSizeRegionSplitPolicy.java
index d915f2e..324c1de 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ConstantSizeRegionSplitPolicy.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ConstantSizeRegionSplitPolicy.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 
 /**
  * A {@link RegionSplitPolicy} implementation which splits a region
@@ -47,7 +48,7 @@ public class ConstantSizeRegionSplitPolicy extends 
RegionSplitPolicy {
   protected void configureForRegion(HRegion region) {
 super.configureForRegion(region);
 Configuration conf = getConf();
-HTableDescriptor desc = region.getTableDesc();
+TableDescriptor desc = region.getTableDescriptor();
 if (desc != null) {
   this.desiredMaxFileSize = desc.getMaxFileSize();
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
index ef49f29..21f93ff 100644
--- 

[2/4] hbase git commit: HBASE-18241 Change client.Table, client.Admin, Region, Store, and HBaseTestingUtility to not use HTableDescriptor or HColumnDescriptor

2017-07-08 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
index 4441c08..32c08a9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
 import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.util.StreamUtils;
@@ -267,7 +268,7 @@ public class VisibilityUtils {
   public static Filter createVisibilityLabelFilter(Region region, 
Authorizations authorizations)
   throws IOException {
 Map cfVsMaxVersions = new HashMap<>();
-for (HColumnDescriptor hcd : region.getTableDesc().getFamilies()) {
+for (ColumnFamilyDescriptor hcd : 
region.getTableDescriptor().getColumnFamilies()) {
   cfVsMaxVersions.put(new SimpleMutableByteRange(hcd.getName()), 
hcd.getMaxVersions());
 }
 VisibilityLabelService vls = VisibilityLabelServiceManager.getInstance()

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
index 7ba5312..86687d9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
@@ -216,7 +216,7 @@ public final class SnapshotManifest {
 
 for (Store store : region.getStores()) {
   // 2.1. build the snapshot reference for the store
-  Object familyData = visitor.familyOpen(regionData, 
store.getFamily().getName());
+  Object familyData = visitor.familyOpen(regionData, 
store.getColumnFamilyDescriptor().getName());
   monitor.rethrowException();
 
   List storeFiles = new ArrayList<>(store.getStorefiles());

http://git-wip-us.apache.org/repos/asf/hbase/blob/53ec9c5b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
index 1e1aa9a..3316ec5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
@@ -71,6 +71,7 @@ import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Get;
@@ -79,6 +80,7 @@ import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.tool.Canary.RegionTask.TaskType;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -117,14 +119,14 @@ public final class Canary implements Tool {
 public long getReadFailureCount();
 public long incReadFailureCount();
 public void publishReadFailure(ServerName serverName, HRegionInfo region, 
Exception e);
-public void publishReadFailure(ServerName serverName, HRegionInfo region, 
HColumnDescriptor column, Exception e);
+public void publishReadFailure(ServerName serverName, HRegionInfo region, 
ColumnFamilyDescriptor column, Exception e);
 public void updateReadFailedHostList(HRegionInfo region, String 
serverName);
 public Map<String, String> getReadFailures();
-public void publishReadTiming(ServerName serverName, HRegionInfo region, 
HColumnDescriptor column, long msTime);
+public void publishReadTiming(ServerName serverName, HRegionInfo region, 

[2/4] hbase git commit: HBASE-18241 Change client.Table, client.Admin, Region, Store, and HBaseTestingUtility to not use HTableDescriptor or HColumnDescriptor

2017-07-08 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/bc8ebc6f/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
index 4441c08..32c08a9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.TagType;
 import org.apache.hadoop.hbase.TagUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.util.StreamUtils;
@@ -267,7 +268,7 @@ public class VisibilityUtils {
   public static Filter createVisibilityLabelFilter(Region region, 
Authorizations authorizations)
   throws IOException {
 Map<ByteRange, Integer> cfVsMaxVersions = new HashMap<>();
-for (HColumnDescriptor hcd : region.getTableDesc().getFamilies()) {
+for (ColumnFamilyDescriptor hcd : 
region.getTableDescriptor().getColumnFamilies()) {
   cfVsMaxVersions.put(new SimpleMutableByteRange(hcd.getName()), 
hcd.getMaxVersions());
 }
 VisibilityLabelService vls = VisibilityLabelServiceManager.getInstance()

http://git-wip-us.apache.org/repos/asf/hbase/blob/bc8ebc6f/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
index 7ba5312..86687d9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotManifest.java
@@ -216,7 +216,7 @@ public final class SnapshotManifest {
 
 for (Store store : region.getStores()) {
   // 2.1. build the snapshot reference for the store
-  Object familyData = visitor.familyOpen(regionData, 
store.getFamily().getName());
+  Object familyData = visitor.familyOpen(regionData, 
store.getColumnFamilyDescriptor().getName());
   monitor.rethrowException();
 
    List<StoreFile> storeFiles = new ArrayList<>(store.getStorefiles());

http://git-wip-us.apache.org/repos/asf/hbase/blob/bc8ebc6f/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
index 1e1aa9a..3316ec5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
@@ -71,6 +71,7 @@ import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Get;
@@ -79,6 +80,7 @@ import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.tool.Canary.RegionTask.TaskType;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -117,14 +119,14 @@ public final class Canary implements Tool {
 public long getReadFailureCount();
 public long incReadFailureCount();
 public void publishReadFailure(ServerName serverName, HRegionInfo region, 
Exception e);
-public void publishReadFailure(ServerName serverName, HRegionInfo region, 
HColumnDescriptor column, Exception e);
+public void publishReadFailure(ServerName serverName, HRegionInfo region, 
ColumnFamilyDescriptor column, Exception e);
 public void updateReadFailedHostList(HRegionInfo region, String 
serverName);
 public Map<String, String> getReadFailures();
-public void publishReadTiming(ServerName serverName, HRegionInfo region, 
HColumnDescriptor column, long msTime);
+public void publishReadTiming(ServerName serverName, HRegionInfo region, 

[4/4] hbase git commit: HBASE-18241 Change client.Table, client.Admin, Region, Store, and HBaseTestingUtility to not use HTableDescriptor or HColumnDescriptor

2017-07-08 Thread chia7712
HBASE-18241 Change client.Table, client.Admin, Region, Store, and 
HBaseTestingUtility to not use HTableDescriptor or HColumnDescriptor


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bc8ebc6f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bc8ebc6f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bc8ebc6f

Branch: refs/heads/master
Commit: bc8ebc6f72fe0ed4f004d07400f38845def0ca17
Parents: 590f02a
Author: Chia-Ping Tsai 
Authored: Sat Jul 8 16:54:25 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Jul 8 16:54:25 2017 +0800

--
 .../apache/hadoop/hbase/HTableDescriptor.java   |  16 +-
 .../org/apache/hadoop/hbase/client/Admin.java   | 280 -
 .../apache/hadoop/hbase/client/HBaseAdmin.java  | 144 -
 .../org/apache/hadoop/hbase/client/HTable.java  |  12 +-
 .../org/apache/hadoop/hbase/client/Table.java   |  11 +-
 .../hadoop/hbase/client/TableDescriptor.java|   8 +
 .../hbase/client/TableDescriptorBuilder.java|  11 +-
 .../hadoop/hbase/security/EncryptionUtil.java   |   3 +-
 .../hbase/shaded/protobuf/RequestConverter.java |   6 +-
 .../example/ZooKeeperScanPolicyObserver.java|   2 +-
 .../hadoop/hbase/rest/client/RemoteHTable.java  |   6 +
 .../backup/mapreduce/HFileSplitterJob.java  |   2 +-
 .../hadoop/hbase/client/HTableWrapper.java  |   5 +
 .../hbase/constraint/ConstraintProcessor.java   |   3 +-
 .../hadoop/hbase/constraint/Constraints.java|   3 +-
 .../hadoop/hbase/mapreduce/CopyTable.java   |   2 +-
 .../hbase/mapreduce/HFileOutputFormat2.java |  68 +++--
 .../apache/hadoop/hbase/mapreduce/Import.java   |   8 +-
 .../hadoop/hbase/mapreduce/ImportTsv.java   |  10 +-
 .../hbase/mapreduce/LoadIncrementalHFiles.java  |   7 +-
 .../mapreduce/MultiTableHFileOutputFormat.java  |   5 +-
 .../hadoop/hbase/mapreduce/WALPlayer.java   |   2 +-
 .../hbase/mob/DefaultMobStoreCompactor.java |   6 +-
 .../hbase/mob/DefaultMobStoreFlusher.java   |   6 +-
 .../apache/hadoop/hbase/mob/MobCacheConfig.java |   3 +-
 .../org/apache/hadoop/hbase/mob/MobUtils.java   |   3 +-
 .../hbase/quotas/ActivePolicyEnforcement.java   |   2 +-
 .../quotas/RegionServerRpcQuotaManager.java |   2 +-
 .../hadoop/hbase/regionserver/CompactSplit.java |   6 +-
 .../hbase/regionserver/CompactingMemStore.java  |   2 +-
 .../ConstantSizeRegionSplitPolicy.java  |   3 +-
 .../hbase/regionserver/DefaultStoreFlusher.java |   2 +-
 .../DelimitedKeyPrefixRegionSplitPolicy.java|   4 +-
 .../regionserver/FlushAllLargeStoresPolicy.java |   4 +-
 .../regionserver/FlushLargeStoresPolicy.java|   8 +-
 .../hbase/regionserver/FlushPolicyFactory.java  |   5 +-
 .../hadoop/hbase/regionserver/HMobStore.java|   9 +-
 .../hadoop/hbase/regionserver/HRegion.java  | 136 +
 .../hbase/regionserver/HRegionServer.java   |   6 +-
 .../hadoop/hbase/regionserver/HStore.java   |  21 +-
 ...IncreasingToUpperBoundRegionSplitPolicy.java |   9 +-
 .../KeyPrefixRegionSplitPolicy.java |  10 +-
 .../hbase/regionserver/MemStoreFlusher.java |   2 +-
 .../regionserver/MetricsRegionWrapperImpl.java  |   6 +-
 .../MetricsTableWrapperAggregateImpl.java   |   2 +-
 .../hbase/regionserver/RSDumpServlet.java   |   2 +-
 .../hbase/regionserver/RSRpcServices.java   |   6 +-
 .../hadoop/hbase/regionserver/Region.java   |   3 +-
 .../regionserver/RegionCoprocessorHost.java |  11 +-
 .../regionserver/RegionServicesForStores.java   |   2 +-
 .../hbase/regionserver/RegionSplitPolicy.java   |   7 +-
 .../hadoop/hbase/regionserver/ScanInfo.java |   6 +-
 .../regionserver/SecureBulkLoadManager.java |   2 +-
 .../apache/hadoop/hbase/regionserver/Store.java |   4 +-
 .../hbase/regionserver/StoreFileScanner.java|   2 +-
 .../hbase/regionserver/StripeStoreFlusher.java  |   2 +-
 .../regionserver/compactions/Compactor.java |  12 +-
 .../throttle/ThroughputControlUtil.java |   3 +-
 .../security/access/AccessControlLists.java |   2 +-
 .../hbase/security/access/AccessController.java |  15 +-
 .../security/visibility/VisibilityUtils.java|   3 +-
 .../hadoop/hbase/snapshot/SnapshotManifest.java |   2 +-
 .../org/apache/hadoop/hbase/tool/Canary.java|  30 +-
 .../hadoop/hbase/util/RegionSplitter.java   |  18 +-
 .../hbase/util/ServerRegionReplicaUtil.java |   2 +-
 .../hadoop/hbase/HBaseTestingUtility.java   | 298 ---
 .../apache/hadoop/hbase/MiniHBaseCluster.java   |   8 +-
 .../org/apache/hadoop/hbase/TestIOFencing.java  |  12 +-
 .../hadoop/hbase/backup/TestHFileArchiving.java |   4 +-
 .../hbase/backup/TestIncrementalBackup.java |   2 +-
 .../apache/hadoop/hbase/client/TestAdmin1.java  |   4 +-
 ...estAvoidCellReferencesIntoShippedBlocks.java |   2 +-
 .../hadoop/hbase/client/TestMetaCache.java 

hbase git commit: HBASE-18341 Generalize regex matchers in findHangingTests.py script to match new consoleText of trunk build.

2017-07-08 Thread appy
Repository: hbase
Updated Branches:
  refs/heads/branch-2 5d4e4f200 -> c05a40809


HBASE-18341 Generalize regex matchers in findHangingTests.py script to match 
new consoleText of trunk build.

Change-Id: I0a4215827d3d561eef3f583da666c617f690d934


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c05a4080
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c05a4080
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c05a4080

Branch: refs/heads/branch-2
Commit: c05a4080938fe7a7ec3e3914740bc8a0c8c1b6c2
Parents: 5d4e4f2
Author: Apekshit Sharma 
Authored: Sat Jul 8 00:49:31 2017 -0700
Committer: Apekshit Sharma 
Committed: Sat Jul 8 00:52:16 2017 -0700

--
 dev-support/findHangingTests.py | 12 ++--
 1 file changed, 6 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c05a4080/dev-support/findHangingTests.py
--
diff --git a/dev-support/findHangingTests.py b/dev-support/findHangingTests.py
index 54275df..a8abdab 100755
--- a/dev-support/findHangingTests.py
+++ b/dev-support/findHangingTests.py
@@ -60,18 +60,18 @@ def get_bad_tests(console_url):
 failed_tests_set = set()
 timeout_tests_set = set()
 for line in response.content.splitlines():
-result1 = re.match("^Running 
org.apache.hadoop.hbase.(\\w*\\.)*(\\w*)", line)
-if result1:
-test_case = result1.group(2)
+result1 = re.findall("Running org.apache.hadoop.hbase.(.*)", line)
+if len(result1) == 1:
+test_case = result1[0]
 if test_case in all_tests_set:
 print ("ERROR! Multiple tests with same name '{}'. Might get 
wrong results "
"for this test.".format(test_case))
 else:
 hanging_tests_set.add(test_case)
 all_tests_set.add(test_case)
-result2 = re.match("^Tests run:.*- in 
org.apache.hadoop.hbase.(\\w*\\.)*(\\w*)", line)
-if result2:
-test_case = result2.group(2)
+result2 = re.findall("Tests run:.*?- in org.apache.hadoop.hbase.(.*)", 
line)
+if len(result2) == 1:
+test_case = result2[0]
 if "FAILURE!" in line:
 failed_tests_set.add(test_case)
 if test_case not in hanging_tests_set:



hbase git commit: HBASE-18341 Generalize regex matchers in findHangingTests.py script to match new consoleText of trunk build.

2017-07-08 Thread appy
Repository: hbase
Updated Branches:
  refs/heads/master 30d06dfe3 -> 590f02aad


HBASE-18341 Generalize regex matchers in findHangingTests.py script to match 
new consoleText of trunk build.

Change-Id: I0a4215827d3d561eef3f583da666c617f690d934


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/590f02aa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/590f02aa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/590f02aa

Branch: refs/heads/master
Commit: 590f02aad013a2955b82a3809202331b86bb2f12
Parents: 30d06df
Author: Apekshit Sharma 
Authored: Sat Jul 8 00:49:31 2017 -0700
Committer: Apekshit Sharma 
Committed: Sat Jul 8 00:50:20 2017 -0700

--
 dev-support/findHangingTests.py | 12 ++--
 1 file changed, 6 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/590f02aa/dev-support/findHangingTests.py
--
diff --git a/dev-support/findHangingTests.py b/dev-support/findHangingTests.py
index 54275df..a8abdab 100755
--- a/dev-support/findHangingTests.py
+++ b/dev-support/findHangingTests.py
@@ -60,18 +60,18 @@ def get_bad_tests(console_url):
 failed_tests_set = set()
 timeout_tests_set = set()
 for line in response.content.splitlines():
-result1 = re.match("^Running 
org.apache.hadoop.hbase.(\\w*\\.)*(\\w*)", line)
-if result1:
-test_case = result1.group(2)
+result1 = re.findall("Running org.apache.hadoop.hbase.(.*)", line)
+if len(result1) == 1:
+test_case = result1[0]
 if test_case in all_tests_set:
 print ("ERROR! Multiple tests with same name '{}'. Might get 
wrong results "
"for this test.".format(test_case))
 else:
 hanging_tests_set.add(test_case)
 all_tests_set.add(test_case)
-result2 = re.match("^Tests run:.*- in 
org.apache.hadoop.hbase.(\\w*\\.)*(\\w*)", line)
-if result2:
-test_case = result2.group(2)
+result2 = re.findall("Tests run:.*?- in org.apache.hadoop.hbase.(.*)", 
line)
+if len(result2) == 1:
+test_case = result2[0]
 if "FAILURE!" in line:
 failed_tests_set.add(test_case)
 if test_case not in hanging_tests_set: