hbase git commit: HBASE-15776 Replace master.am.getTableStateManager() with the direct master.getTableStateManager()

2016-05-06 Thread mbertozzi
Repository: hbase
Updated Branches:
  refs/heads/master 56358a0fd -> 513ca3483


HBASE-15776 Replace master.am.getTableStateManager() with the direct master.getTableStateManager()


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/513ca348
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/513ca348
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/513ca348

Branch: refs/heads/master
Commit: 513ca3483f1d32450ffa0c034e7a7f97b63ff582
Parents: 56358a0
Author: Matteo Bertozzi 
Authored: Fri May 6 19:30:45 2016 -0700
Committer: Matteo Bertozzi 
Committed: Fri May 6 19:30:45 2016 -0700

--
 .../main/java/org/apache/hadoop/hbase/master/HMaster.java | 10 +-
 .../hbase/master/procedure/AddColumnFamilyProcedure.java  |  2 +-
 .../hbase/master/procedure/CreateTableProcedure.java  |  2 +-
 .../master/procedure/DeleteColumnFamilyProcedure.java |  2 +-
 .../hbase/master/procedure/DeleteTableProcedure.java  |  4 ++--
 .../hbase/master/procedure/DisableTableProcedure.java |  7 +++
 .../hbase/master/procedure/EnableTableProcedure.java  |  6 +++---
 .../hbase/master/procedure/MasterDDLOperationHelper.java  |  2 +-
 .../master/procedure/ModifyColumnFamilyProcedure.java |  2 +-
 .../hbase/master/procedure/ModifyTableProcedure.java  |  4 ++--
 .../hadoop/hbase/master/snapshot/SnapshotManager.java |  6 ++
 .../java/org/apache/hadoop/hbase/client/TestAdmin1.java   |  6 +++---
 .../java/org/apache/hadoop/hbase/master/TestMaster.java   |  3 +--
 .../master/TestMasterRestartAfterDisablingTable.java  |  8 
 .../master/procedure/MasterProcedureTestingUtility.java   |  4 ++--
 15 files changed, 32 insertions(+), 36 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/513ca348/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index c2cab33..3829b35 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -944,10 +944,11 @@ public class HMaster extends HRegionServer implements MasterServices {
   assigned++;
 }
 
-if (replicaId == HRegionInfo.DEFAULT_REPLICA_ID)
+if (replicaId == HRegionInfo.DEFAULT_REPLICA_ID) {
+  // TODO: should we prevent from using state manager before meta was initialized?
+  // tableStateManager.start();
   getTableStateManager().setTableState(TableName.META_TABLE_NAME, TableState.State.ENABLED);
-// TODO: should we prevent from using state manager before meta was initialized?
-// tableStateManager.start();
+}
 
 if ((RecoveryMode.LOG_REPLAY == this.getMasterWalManager().getLogRecoveryMode())
 && (!previouslyFailedMetaRSs.isEmpty())) {
@@ -2096,8 +2097,7 @@ public class HMaster extends HRegionServer implements MasterServices {
 if (!MetaTableAccessor.tableExists(getConnection(), tableName)) {
   throw new TableNotFoundException(tableName);
 }
-if (!getAssignmentManager().getTableStateManager().
-isTableState(tableName, TableState.State.DISABLED)) {
+if (!getTableStateManager().isTableState(tableName, TableState.State.DISABLED)) {
   throw new TableNotDisabledException(tableName);
 }
   }
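
The shape of the change is the same for every caller in this patch: the TableStateManager is reached through the master (or MasterServices) directly instead of through the AssignmentManager. A minimal before/after sketch, using only the accessors and types visible in this diff; the wrapper class and method names below are made up for illustration:

    import java.io.IOException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.TableNotDisabledException;
    import org.apache.hadoop.hbase.client.TableState;
    import org.apache.hadoop.hbase.master.MasterServices;

    public final class TableStateCheckSketch {
      // Old call chain: master.getAssignmentManager().getTableStateManager().isTableState(...)
      // New call chain: master.getTableStateManager().isTableState(...)
      static void assertDisabled(final MasterServices master, final TableName tableName)
          throws IOException {
        if (!master.getTableStateManager().isTableState(tableName, TableState.State.DISABLED)) {
          throw new TableNotDisabledException(tableName);
        }
      }
    }

Procedure-side callers (see the AddColumnFamilyProcedure hunk below) follow the same pattern via env.getMasterServices().getTableStateManager().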

http://git-wip-us.apache.org/repos/asf/hbase/blob/513ca348/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
index 4a6c67d..ce099ed 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
@@ -339,7 +339,7 @@ public class AddColumnFamilyProcedure
*/
   private void reOpenAllRegionsIfTableIsOnline(final MasterProcedureEnv env) throws IOException {
 // This operation only run when the table is enabled.
-if (!env.getMasterServices().getAssignmentManager().getTableStateManager()
+if (!env.getMasterServices().getTableStateManager()
 .isTableState(getTableName(), TableState.State.ENABLED)) {
   return;
 }


[23/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
[Generated-site diff preview (HTML markup stripped): coc.html, cygwin.html, dependencies.html, dependency-convergence.html, dependency-info.html and dependency-management.html are regenerated, each bumping its "Last Published" footer from 2016-04-28 to 2016-05-06. dependency-convergence.html additionally drops the commons-httpclient:commons-httpclient 3.1 entry (hbase-rest, hbase-server), adds org.apache.httpcomponents:httpclient 4.3.6 for the same modules, and adds hbase-assembly to a 2.0.0-SNAPSHOT convergence entry.]

[11/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
[Generated-site diff preview (HTML markup stripped): devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html is regenerated; the Admin and HBaseAdmin rows for deleteTables, disableTables and enableTables are reordered, and the deprecated HConnection.getHTableDescriptor(byte[]) entry is dropped. The preview is truncated mid-page.]

[39/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
[Generated-site diff preview (HTML markup stripped): a new page apidocs/org/apache/hadoop/hbase/client/class-use/CompactionState.html is added, indexing the users of the new client CompactionState enum: Admin.getCompactionState(TableName), Admin.getCompactionState(TableName, CompactType), Admin.getCompactionStateForRegion(byte[]), plus the generated valueOf(String) and values() methods. The class-use pages for Consistency and Durability are regenerated with their Get/Query/Scan and Append/Delete/Put/Mutation setter rows reordered. The preview is truncated at IsolationLevel.html.]
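
The new class-use page is only the generated index for the compaction-state accessors on Admin. A minimal usage sketch, not taken from the commit, assuming a reachable cluster and an existing table named "exampleTable":

    import java.io.IOException;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.CompactionState;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public final class CompactionStateSketch {
      public static void main(String[] args) throws IOException {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          // Admin.getCompactionState(TableName) is one of the methods the new page indexes.
          CompactionState state = admin.getCompactionState(TableName.valueOf("exampleTable"));
          System.out.println("Compaction state of exampleTable: " + state);
        }
      }
    }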

[26/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
[Generated-site diff preview (HTML markup stripped): apidocs/src-html/org/apache/hadoop/hbase/snapshot/SnapshotInfo.html is regenerated; the rendered source of SnapshotInfo shifts (imports and line numbering change). The preview is truncated inside the SnapshotStats inner class.]

[05/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
[Generated-site diff preview (HTML markup stripped): devapidocs class-use pages are regenerated. TableNotFoundException.html swaps the order of the DisableTableHandler.prepare() and EnableTableHandler.prepare() rows; Tag.html reorders the CellUtil/TagUtil EMPTY_TAGS_ITR fields and the VisibilityLabelService/DefaultVisibilityLabelServiceImpl createVisibilityExpTags and encodeVisibilityForReplication rows. The preview is truncated mid-page.]

[14/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
[Generated-site diff preview (HTML markup stripped): devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html is regenerated; the Admin and HBaseAdmin rows for addColumn/addColumnFamily and modifyColumn/modifyColumnFamily are reordered, swapping which deprecation note sits next to which class. The preview is truncated mid-page.]

[40/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
[Generated-site diff preview (HTML markup stripped): a new page apidocs/org/apache/hadoop/hbase/client/SnapshotDescription.html is added for the @InterfaceAudience.Public, @InterfaceStability.Evolving class SnapshotDescription, described as "The POJO equivalent of HBaseProtos.SnapshotDescription". The page lists the constructors SnapshotDescription(String name), (name, table), (name, table, SnapshotType type), (name, table, type, owner) and (name, table, type, owner, long creationTime, int version), and the methods getCreationTime(), getName(), getOwner(), getTable(), getType(), getVersion() and toString(). The preview is truncated at the inherited-methods section.]
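
Going only by the constructor and getter list on the new page, a small illustrative sketch (the snapshot and table names are invented):

    import org.apache.hadoop.hbase.client.SnapshotDescription;

    public final class SnapshotDescriptionSketch {
      public static void main(String[] args) {
        // Two-argument form from the page: (name, table).
        SnapshotDescription desc = new SnapshotDescription("usertable-snap-20160506", "usertable");
        System.out.println(desc.getName() + " describes a snapshot of " + desc.getTable());
      }
    }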

[07/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
[Generated-site diff preview (HTML markup stripped): the devapidocs class-use pages for SettableSequenceId and SettableTimestamp drop the org.apache.hadoop.hbase.io.hfile package section and its HFileReaderImpl.HFileScannerImpl.ShareableMemory* implementing classes, and class-use/ShareableMemory.html is deleted outright. The preview is truncated at SizeCachedKeyValue.html.]

[34/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
[Generated-site diff preview (HTML markup stripped): apidocs/overview-tree.html gains org.apache.hadoop.hbase.client.SnapshotDescription and org.apache.hadoop.hbase.CallDroppedException, and reshuffles its enum list, replacing Admin.CompactType and Admin.MasterSwitchType with the top-level client enums CompactType, MasterSwitchType, CompactionState and SnapshotType. serialized-form.html adds CallDroppedException and changes the serialized snapshot description field from the protobuf HBaseProtos.SnapshotDescription to the new client SnapshotDescription. A new src-html page for CallDroppedException is added, and the devapidocs class-use pages for ScheduledChore and Server are regenerated with their rows reordered. The preview is truncated mid-page.]

[48/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
[Generated-site diff preview (HTML markup stripped): apache_hbase_reference_guide.pdfmarks bumps /ModDate and /CreationDate from D:20160428150631 to D:20160506145407. apidocs/allclasses-frame.html and allclasses-noframe.html drop Admin.CompactType and Admin.MasterSwitchType and add CallDroppedException, CompactionState, CompactType, MasterSwitchType, SnapshotDescription and SnapshotType.]

[28/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
index b547fde..e36ae5f 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.html
@@ -32,645 +32,635 @@
 024import java.net.InetSocketAddress;
 025import java.net.UnknownHostException;
 026import java.util.ArrayList;
-027import java.util.HashMap;
-028import java.util.List;
-029
-030import org.apache.commons.logging.Log;
-031import org.apache.commons.logging.LogFactory;
-032import org.apache.hadoop.conf.Configuration;
-033import org.apache.hadoop.hbase.classification.InterfaceAudience;
-034import org.apache.hadoop.hbase.classification.InterfaceStability;
-035import org.apache.hadoop.hbase.HConstants;
-036import org.apache.hadoop.hbase.HRegionLocation;
-037import org.apache.hadoop.hbase.TableName;
-038import org.apache.hadoop.hbase.client.Admin;
-039import org.apache.hadoop.hbase.client.Connection;
-040import org.apache.hadoop.hbase.client.RegionLocator;
-041import org.apache.hadoop.hbase.client.Result;
-042import org.apache.hadoop.hbase.client.Scan;
-043import org.apache.hadoop.hbase.client.Table;
-044import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-045import org.apache.hadoop.hbase.util.Addressing;
-046import org.apache.hadoop.hbase.util.Bytes;
-047import org.apache.hadoop.hbase.util.Pair;
-048import org.apache.hadoop.hbase.util.RegionSizeCalculator;
-049import org.apache.hadoop.hbase.util.Strings;
-050import org.apache.hadoop.mapreduce.InputFormat;
-051import org.apache.hadoop.mapreduce.InputSplit;
-052import org.apache.hadoop.mapreduce.JobContext;
-053import org.apache.hadoop.mapreduce.RecordReader;
-054import org.apache.hadoop.mapreduce.TaskAttemptContext;
-055import org.apache.hadoop.net.DNS;
-056import org.apache.hadoop.util.StringUtils;
-057
-058/**
-059 * A base for {@link TableInputFormat}s. Receives a {@link Connection}, a {@link TableName},
-060 * an {@link Scan} instance that defines the input columns etc. Subclasses may use
-061 * other TableRecordReader implementations.
-062 *
-063 * Subclasses MUST ensure initializeTable(Connection, TableName) is called for an instance to
-064 * function properly. Each of the entry points to this class used by the MapReduce framework,
-065 * {@link #createRecordReader(InputSplit, TaskAttemptContext)} and {@link #getSplits(JobContext)},
-066 * will call {@link #initialize(JobContext)} as a convenient centralized location to handle
-067 * retrieving the necessary configuration information. If your subclass overrides either of these
-068 * methods, either call the parent version or call initialize yourself.
-069 *
-070 * <p>
-071 * An example of a subclass:
-072 * <pre>
-073 *   class ExampleTIF extends TableInputFormatBase {
-074 *
-075 * {@literal @}Override
-076 * protected void initialize(JobContext context) throws IOException {
-077 *   // We are responsible for the lifecycle of this connection until we hand it over in
-078 *   // initializeTable.
-079 *   Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create(
-080 *  job.getConfiguration()));
-081 *   TableName tableName = TableName.valueOf("exampleTable");
-082 *   // mandatory. once passed here, TableInputFormatBase will handle closing the connection.
-083 *   initializeTable(connection, tableName);
-084 *   byte[][] inputColumns = new byte [][] { Bytes.toBytes("columnA"),
-085 * Bytes.toBytes("columnB") };
-086 *   // optional, by default we'll get everything for the table.
-087 *   Scan scan = new Scan();
-088 *   for (byte[] family : inputColumns) {
-089 * scan.addFamily(family);
-090 *   }
-091 *   Filter exampleFilter = new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*"));
-092 *   scan.setFilter(exampleFilter);
-093 *   setScan(scan);
-094 * }
-095 *   }
-096 * </pre>
-097 */
-098@InterfaceAudience.Public
-099@InterfaceStability.Stable
-100public abstract class TableInputFormatBase
-101extends InputFormat<ImmutableBytesWritable, Result> {
-102
-103  /** Specify if we enable auto-balance for input in M/R jobs.*/
-104  public static final String MAPREDUCE_INPUT_AUTOBALANCE = "hbase.mapreduce.input.autobalance";
-105  /** Specify if ratio for data skew in M/R jobs, it goes well with the enabling hbase.mapreduce
-106   * .input.autobalance property.*/
-107  public static final String INPUT_AUTOBALANCE_MAXSKEWRATIO = "hbase.mapreduce.input.autobalance" +
-108  ".maxskewratio";
-109  /** Specify if the row key in
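
A self-contained version of the subclass example quoted in the javadoc above might look like the following sketch. It follows the contract the comment spells out (hand the Connection to initializeTable, then narrow the Scan); the class name, table name and column families are the same placeholders the javadoc uses, and the JobContext passed to initialize() supplies the configuration in place of the undeclared job variable in the quoted snippet.

    import java.io.IOException;

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    import org.apache.hadoop.hbase.filter.RegexStringComparator;
    import org.apache.hadoop.hbase.filter.RowFilter;
    import org.apache.hadoop.hbase.mapreduce.TableInputFormatBase;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.mapreduce.JobContext;

    /** Illustrative subclass; "exampleTable" and the column families are placeholders. */
    public class ExampleTIF extends TableInputFormatBase {

      @Override
      protected void initialize(JobContext context) throws IOException {
        // We own this connection only until initializeTable() takes it over.
        Connection connection = ConnectionFactory.createConnection(
            HBaseConfiguration.create(context.getConfiguration()));
        // Mandatory: from here on TableInputFormatBase closes the connection.
        initializeTable(connection, TableName.valueOf("exampleTable"));

        // Optional: limit the scan to two families and rows starting with "aa".
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes("columnA"));
        scan.addFamily(Bytes.toBytes("columnB"));
        scan.setFilter(new RowFilter(CompareOp.EQUAL, new RegexStringComparator("aa.*")));
        setScan(scan);
      }
    }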

[18/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HConstants.html 
b/devapidocs/org/apache/hadoop/hbase/HConstants.html
index cbebcb9..59d7c7a 100644
--- a/devapidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/org/apache/hadoop/hbase/HConstants.html
@@ -268,587 +268,591 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 CORRUPT_DIR_NAME
 
 
+static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+CP_HTD_ATTR_INCLUSION_KEY
+
+
 static http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_KEY_PATTERN
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_VALUE_PARAM_PATTERN
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_VALUE_PATTERN
 
  Pattern that matches a coprocessor specification.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY
 Configuration key for the name of the alternate cipher 
algorithm for the cluster, a string
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_CIPHERPROVIDER_CONF_KEY
 Configuration key for the crypto algorithm provider, a 
class name
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEY_ALGORITHM_CONF_KEY
 Configuration key for the algorithm used for creating jks 
key, a string
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEYPROVIDER_CONF_KEY
 Configuration key for the crypto key provider, a class 
name
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEYPROVIDER_PARAMETERS_KEY
 Configuration key for the crypto key provider 
parameters
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY
 Configuration key for the name of the alternate master key 
for the cluster, a string
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_MASTERKEY_NAME_CONF_KEY
 Configuration key for the name of the master key for the 
cluster, a string
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_WAL_ALGORITHM_CONF_KEY
 Configuration key for the algorithm to use when encrypting 
the WAL, a string
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_WAL_KEY_NAME_CONF_KEY
 Configuration key for the name of the master WAL encryption 
key for the cluster, a string
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 DATA_FILE_UMASK_KEY
 File permission umask to use when creating hbase data 
files
 
 
-
+
 static int
 DAY_IN_SECONDS
 Seconds in a day, hour and minute
 
 
-
+
 static int
 DEFAULT_BLOCKSIZE
 Default block size for an HFile.
 
 
-
+
 static boolean
 DEFAULT_CLUSTER_DISTRIBUTED
 Default value for cluster distributed mode
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/util/UUID.html?is-external=true;
 title="class or interface in java.util">UUID
 DEFAULT_CLUSTER_ID
 Default cluster ID, cannot be used to identify a cluster so 
a key with
  this value means it wasn't meant for replication.
 
 
-
+
 static boolean
 DEFAULT_DISALLOW_WRITES_IN_RECOVERING_CONFIG
 
-
+
 static boolean
 DEFAULT_DISTRIBUTED_LOG_REPLAY_CONFIG
 
-
+
 static boolean
 DEFAULT_ENABLE_CLIENT_BACKPRESSURE
 
-
+
 static int
 

[35/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
index 8752f71..227f913 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
@@ -116,44 +116,36 @@
 
 
 
-byte[]
-OrderedBlobVar.decode(PositionedByteRangesrc)
-
-
-http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true;
 title="class or interface in java.lang">Byte
-OrderedInt8.decode(PositionedByteRangesrc)
-
-
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-RawString.decode(PositionedByteRangesrc)
+OrderedString.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
-RawInteger.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]
+Struct.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true;
 title="class or interface in java.lang">Double
-OrderedFloat64.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
+RawLong.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
-OrderedInt32.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true;
 title="class or interface in java.lang">Short
+OrderedInt16.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true;
 title="class or interface in java.lang">Double
-RawDouble.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true;
 title="class or interface in java.lang">Float
+RawFloat.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true;
 title="class or interface in java.lang">Short
-OrderedInt16.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true;
 title="class or interface in java.lang">Byte
+RawByte.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-OrderedString.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
+OrderedInt64.decode(PositionedByteRangesrc)
 
 
-T
-TerminatedWrapper.decode(PositionedByteRangesrc)
+byte[]
+OrderedBlob.decode(PositionedByteRangesrc)
 
 
 T
@@ -162,73 +154,81 @@
 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true;
 title="class or interface in java.lang">Short
-RawShort.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true;
 title="class or interface in java.lang">Double
+OrderedFloat64.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]
-Struct.decode(PositionedByteRangesrc)
-
-
 http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true;
 title="class or interface in java.lang">Byte
-RawByte.decode(PositionedByteRangesrc)
-
-
-byte[]
-OrderedBlob.decode(PositionedByteRangesrc)
+OrderedInt8.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true;
 title="class or interface in java.lang">Float
-OrderedFloat32.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
+RawInteger.decode(PositionedByteRangesrc)
 
 
 http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true;
 title="class or interface in java.lang">Float
-RawFloat.decode(PositionedByteRangesrc)
+OrderedFloat32.decode(PositionedByteRangesrc)
 
 
-byte[]
-RawBytes.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
+OrderedInt32.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Number.html?is-external=true;
 title="class or interface in java.lang">Number
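
The class-use table above only records which DataType implementations decode a PositionedByteRange; a minimal round trip with one of the listed codecs might look like the sketch below. It assumes SimplePositionedMutableByteRange from org.apache.hadoop.hbase.util as the backing buffer; any PositionedByteRange implementation with enough capacity would do.

    import org.apache.hadoop.hbase.types.OrderedInt32;
    import org.apache.hadoop.hbase.util.PositionedByteRange;
    import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

    public class OrderedInt32RoundTrip {
      public static void main(String[] args) {
        // Encode 42 with the ascending order-preserving codec, then decode it back.
        OrderedInt32 codec = OrderedInt32.ASCENDING;
        PositionedByteRange buf = new SimplePositionedMutableByteRange(codec.encodedLength(42));
        codec.encode(buf, 42);
        buf.setPosition(0);                      // rewind before reading
        Integer decoded = codec.decode(buf);
        System.out.println(decoded);             // prints 42
      }
    }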

[02/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/client/Append.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Append.html 
b/devapidocs/org/apache/hadoop/hbase/client/Append.html
index c0fa265..cd55a33 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Append.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Append.html
@@ -35,7 +35,7 @@
 
 
 
-Prev 
Class
+Prev 
Class
 Next 
Class
 
 
@@ -586,7 +586,7 @@ extends 
 
 
-Prev 
Class
+Prev 
Class
 Next 
Class
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.AsyncRequestFuture.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.AsyncRequestFuture.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.AsyncRequestFuture.html
index b1598a0..1e31795 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.AsyncRequestFuture.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.AsyncRequestFuture.html
@@ -95,7 +95,7 @@
 
 
 
-public static interface AsyncProcess.AsyncRequestFuture
+public static interface AsyncProcess.AsyncRequestFuture
 The context used to wait for results from one submit call.
  1) If AsyncProcess is set to track errors globally, and not per call (for HTable puts),
 then errors and failed operations in this object will reflect global errors.
@@ -161,7 +161,7 @@
 
 
 hasError
-booleanhasError()
+booleanhasError()
 
 
 
@@ -170,7 +170,7 @@
 
 
 getErrors
-RetriesExhaustedWithDetailsExceptiongetErrors()
+RetriesExhaustedWithDetailsExceptiongetErrors()
 
 
 
@@ -179,7 +179,7 @@
 
 
 getFailedOperations
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends RowgetFailedOperations()
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends RowgetFailedOperations()
 
 
 
@@ -188,7 +188,7 @@
 
 
 getResults
-http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]getResults()
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]getResults()
 throws http://docs.oracle.com/javase/7/docs/api/java/io/InterruptedIOException.html?is-external=true;
 title="class or interface in java.io">InterruptedIOException
 Throws:
 http://docs.oracle.com/javase/7/docs/api/java/io/InterruptedIOException.html?is-external=true;
 title="class or interface in 
java.io">InterruptedIOException
@@ -200,7 +200,7 @@
 
 
 waitUntilDone
-voidwaitUntilDone()
+voidwaitUntilDone()
throws http://docs.oracle.com/javase/7/docs/api/java/io/InterruptedIOException.html?is-external=true;
 title="class or interface in java.io">InterruptedIOException
 Wait until all tasks are executed, successfully or 
not.
 Throws:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
index 6b8fa43..2263aa1 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable.html
@@ -103,7 +103,7 @@
 
 
 
-private final class AsyncProcess.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable
+private final class AsyncProcess.AsyncRequestFutureImpl.ReplicaCallIssuingRunnable
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/7/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable
 Runnable (that can be submitted to thread pool) that waits 
for when it's time
@@ -215,7 +215,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Runnable.
 
 
 startTime
-private finallong startTime
+private finallong startTime
 
 
 
@@ -224,7 +224,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Runnable.
 
 
 initialActions
-private finalhttp://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListActionRow initialActions
+private 

[01/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 83cfd2adf -> 958717f4d


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.Retry.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.Retry.html 
b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.Retry.html
index d7e93e7..4afacfc 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.Retry.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/AsyncProcess.Retry.html
@@ -108,7 +108,7 @@
 
 
 
-private static enum AsyncProcess.Retry
+private static enum AsyncProcess.Retry
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumAsyncProcess.Retry
 For AsyncRequestFutureImpl#manageError(int, Row, Retry, Throwable, ServerName). Only
  used to make logging more clear, we don't actually care why we don't retry.
@@ -207,7 +207,7 @@ the order they are declared.
 
 
 YES
-public static finalAsyncProcess.Retry YES
+public static finalAsyncProcess.Retry YES
 
 
 
@@ -216,7 +216,7 @@ the order they are declared.
 
 
 NO_LOCATION_PROBLEM
-public static finalAsyncProcess.Retry NO_LOCATION_PROBLEM
+public static finalAsyncProcess.Retry NO_LOCATION_PROBLEM
 
 
 
@@ -225,7 +225,7 @@ the order they are declared.
 
 
 NO_NOT_RETRIABLE
-public static finalAsyncProcess.Retry NO_NOT_RETRIABLE
+public static finalAsyncProcess.Retry NO_NOT_RETRIABLE
 
 
 
@@ -234,7 +234,7 @@ the order they are declared.
 
 
 NO_RETRIES_EXHAUSTED
-public static finalAsyncProcess.Retry NO_RETRIES_EXHAUSTED
+public static finalAsyncProcess.Retry NO_RETRIES_EXHAUSTED
 
 
 
@@ -243,7 +243,7 @@ the order they are declared.
 
 
 NO_OTHER_SUCCEEDED
-public static finalAsyncProcess.Retry NO_OTHER_SUCCEEDED
+public static finalAsyncProcess.Retry NO_OTHER_SUCCEEDED
 
 
 
@@ -260,7 +260,7 @@ the order they are declared.
 
 
 values
-public staticAsyncProcess.Retry[]values()
+public staticAsyncProcess.Retry[]values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -277,7 +277,7 @@ for (AsyncProcess.Retry c : AsyncProcess.Retry.values())
 
 
 valueOf
-public staticAsyncProcess.RetryvalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+public staticAsyncProcess.RetryvalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 



[42/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Admin.html 
b/apidocs/org/apache/hadoop/hbase/client/Admin.html
index f6dea0f..337b066 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Admin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Admin.html
@@ -36,7 +36,7 @@
 
 
 Prev Class
-Next 
Class
+Next 
Class
 
 
 Frames
@@ -60,7 +60,7 @@
 
 
 Summary:
-Nested|
+Nested|
 Field|
 Constr|
 Method
@@ -93,7 +93,7 @@
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public interface Admin
+public interface Admin
 extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true;
 title="class or interface in java.io">Closeable
 The administrative API for HBase. Obtain an instance from Connection.getAdmin() and call close() afterwards.
@@ -110,33 +110,6 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 
-
-
-
-
-
-Nested Class Summary
-
-Nested Classes
-
-Modifier and Type
-Interface and Description
-
-
-static class
-Admin.CompactType
-Currently, there are only two compact types:
- NORMAL means do store files compaction;
- MOB means do mob files compaction.
-
-
-
-static class
-Admin.MasterSwitchType
-
-
-
-
 
 
 
@@ -267,24 +240,24 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 void
-compact(TableNametableName,
-  Admin.CompactTypecompactType)
-Compact a table.
+compact(TableNametableName,
+  byte[]columnFamily)
+Compact a column family within a table.
 
 
 
 void
-compact(TableNametableName,
-  byte[]columnFamily)
+compact(TableNametableName,
+  byte[]columnFamily,
+  CompactTypecompactType)
 Compact a column family within a table.
 
 
 
 void
-compact(TableNametableName,
-  byte[]columnFamily,
-  Admin.CompactTypecompactType)
-Compact a column family within a table.
+compact(TableNametableName,
+  CompactTypecompactType)
+Compact a table.
 
 
 
@@ -560,20 +533,20 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 getClusterStatus()
 
 
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState
+CompactionState
 getCompactionState(TableNametableName)
 Get the current compaction state of a table.
 
 
 
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState
-getCompactionState(TableNametableName,
-Admin.CompactTypecompactType)
+CompactionState
+getCompactionState(TableNametableName,
+CompactTypecompactType)
 Get the current compaction state of a table.
 
 
 
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState
+CompactionState
 getCompactionStateForRegion(byte[]regionName)
 Get the current compaction state of region.
 
@@ -699,13 +672,13 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 boolean
-isSnapshotFinished(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionsnapshot)
+isSnapshotFinished(SnapshotDescriptionsnapshot)
 Check the current state of the passed snapshot.
 
 
 
 boolean
-isSplitOrMergeEnabled(Admin.MasterSwitchTypeswitchType)
+isSplitOrMergeEnabled(MasterSwitchTypeswitchType)
 Query the current state of the switch
 
 
@@ -742,19 +715,19 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
 listSnapshots()
 List completed snapshots.
 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
 listSnapshots(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
 List all the completed snapshots matching the given 
pattern.
 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
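
The method summary above reflects the move from the nested Admin.CompactType/Admin.MasterSwitchType enums and raw protobuf CompactionState/SnapshotDescription types to top-level client classes. Against those newer signatures, requesting and polling a compaction might look roughly like this sketch; the table and family names are placeholders, and the org.apache.hadoop.hbase.client package for CompactType/CompactionState is assumed from the links in the diff.

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.CompactType;
    import org.apache.hadoop.hbase.client.CompactionState;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CompactExample {
      public static void main(String[] args) throws Exception {
        TableName table = TableName.valueOf("exampleTable");      // placeholder
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          // Ask for a normal (store file) compaction of one column family...
          admin.compact(table, Bytes.toBytes("cf"), CompactType.NORMAL);
          // ...then poll the table's current compaction state.
          CompactionState state = admin.getCompactionState(table, CompactType.NORMAL);
          System.out.println("Compaction state of " + table + ": " + state);
        }
      }
    }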
 

[27/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html
index 5bed818..8b75278 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/WALPlayer.html
@@ -52,307 +52,314 @@
 044import 
org.apache.hadoop.hbase.client.RegionLocator;
 045import 
org.apache.hadoop.hbase.client.Table;
 046import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-047import 
org.apache.hadoop.hbase.regionserver.wal.WALEdit;
-048import 
org.apache.hadoop.hbase.util.Bytes;
-049import 
org.apache.hadoop.hbase.wal.WALKey;
-050import org.apache.hadoop.mapreduce.Job;
-051import 
org.apache.hadoop.mapreduce.Mapper;
-052import 
org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-053import 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-054import org.apache.hadoop.util.Tool;
-055import 
org.apache.hadoop.util.ToolRunner;
-056
-057/**
-058 * A tool to replay WAL files as a M/R job.
-059 * The WAL can be replayed for a set of tables or all tables,
-060 * and a timerange can be provided (in milliseconds).
-061 * The WAL is filtered to the passed set of tables and  the output
-062 * can optionally be mapped to another set of tables.
-063 *
-064 * WAL replay can also generate HFiles for later bulk importing,
-065 * in that case the WAL is replayed for a single table only.
-066 */
-067@InterfaceAudience.Public
-068@InterfaceStability.Stable
-069public class WALPlayer extends Configured implements Tool {
-070  private static final Log LOG = LogFactory.getLog(WALPlayer.class);
-071  final static String NAME = "WALPlayer";
-072  final static String BULK_OUTPUT_CONF_KEY = "wal.bulk.output";
-073  final static String TABLES_KEY = "wal.input.tables";
-074  final static String TABLE_MAP_KEY = "wal.input.tablesmap";
-075
-076  // This relies on Hadoop Configuration to handle warning about deprecated configs and
-077  // to set the correct non-deprecated configs when an old one shows up.
-078  static {
-079    Configuration.addDeprecation("hlog.bulk.output", BULK_OUTPUT_CONF_KEY);
-080    Configuration.addDeprecation("hlog.input.tables", TABLES_KEY);
-081    Configuration.addDeprecation("hlog.input.tablesmap", TABLE_MAP_KEY);
-082    Configuration.addDeprecation(HLogInputFormat.START_TIME_KEY, WALInputFormat.START_TIME_KEY);
-083    Configuration.addDeprecation(HLogInputFormat.END_TIME_KEY, WALInputFormat.END_TIME_KEY);
-084  }
-085
-086  private final static String JOB_NAME_CONF_KEY = "mapreduce.job.name";
-087
-088  protected WALPlayer(final Configuration c) {
-089    super(c);
-090  }
-091
-092  /**
-093   * A mapper that just writes out KeyValues.
-094   * This one can be used together with {@link KeyValueSortReducer}
-095   */
-096  static class WALKeyValueMapper
-097  extends Mapper<WALKey, WALEdit, ImmutableBytesWritable, KeyValue> {
-098    private byte[] table;
-099
-100    @Override
-101    public void map(WALKey key, WALEdit value,
-102      Context context)
-103    throws IOException {
-104      try {
-105        // skip all other tables
-106        if (Bytes.equals(table, key.getTablename().getName())) {
-107          for (Cell cell : value.getCells()) {
-108            KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-109            if (WALEdit.isMetaEditFamily(kv)) continue;
-110            context.write(new ImmutableBytesWritable(CellUtil.cloneRow(kv)), kv);
-111          }
-112        }
-113      } catch (InterruptedException e) {
-114        e.printStackTrace();
-115      }
-116    }
-117
-118    @Override
-119    public void setup(Context context) throws IOException {
-120      // only a single table is supported when HFiles are generated with HFileOutputFormat
-121      String[] tables = context.getConfiguration().getStrings(TABLES_KEY);
-122      if (tables == null || tables.length != 1) {
-123        // this can only happen when WALMapper is used directly by a class other than WALPlayer
-124        throw new IOException("Exactly one table must be specified for bulk HFile case.");
-125      }
-126      table = Bytes.toBytes(tables[0]);
-127    }
-128  }
-129
-130  /**
-131   * A mapper that writes out {@link Mutation} to be directly applied to
-132   * a running HBase instance.
-133   */
-134  protected static class WALMapper
-135  extends Mapper<WALKey, WALEdit, ImmutableBytesWritable, Mutation> {
-136    private Map<TableName, TableName> tables = new TreeMap<TableName, TableName>();
-137
-138    @Override
-139    public void map(WALKey key, WALEdit value, Context context)
-140    throws IOException {
-141      try {
-142        if (tables.isEmpty() || tables.containsKey(key.getTablename())) {
-143
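
The configuration keys declared at the top of the quoted source ("wal.input.tables", "wal.input.tablesmap", "wal.bulk.output") are how a job narrows the replay and switches between live Mutations and bulk-load HFiles. A driver could populate them as in the sketch below before handing the Configuration to the tool; the table names and output path are placeholders, and in practice WALPlayer is usually launched from the command line (hbase org.apache.hadoop.hbase.mapreduce.WALPlayer ...).

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class WALPlayerConfigSketch {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // Replay only this table (key taken from WALPlayer.TABLES_KEY above).
        conf.set("wal.input.tables", "sourceTable");
        // Optionally map the edits onto a different destination table.
        conf.set("wal.input.tablesmap", "destTable");
        // With a bulk output directory set, the job writes HFiles for bulk load
        // instead of live Mutations; exactly one input table is allowed then.
        conf.set("wal.bulk.output", "/tmp/walplayer-out");       // placeholder path
        System.out.println("wal.input.tables = " + conf.get("wal.input.tables"));
      }
    }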

[33/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
index 33659cb..a38b7e1 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
@@ -916,364 +916,366 @@
 908  public static final Pattern 
CP_HTD_ATTR_VALUE_PARAM_PATTERN = Pattern.compile(
 909  "(" + 
CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN + ")=(" +
 910  
CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN + "),?");
-911
-912  /** The delay when re-trying a socket 
operation in a loop (HBASE-4712) */
-913  public static final int 
SOCKET_RETRY_WAIT_MS = 200;
-914
-915  /** Host name of the local machine */
-916  public static final String LOCALHOST = 
"localhost";
-917
-918  /**
-919   * If this parameter is set to true, 
then hbase will read
-920   * data and then verify checksums. 
Checksum verification
-921   * inside hdfs will be switched off.  
However, if the hbase-checksum
-922   * verification fails, then it will 
switch back to using
-923   * hdfs checksums for verifiying data 
that is being read from storage.
-924   *
-925   * If this parameter is set to false, 
then hbase will not
-926   * verify any checksums, instead it 
will depend on checksum verification
-927   * being done in the hdfs client.
-928   */
-929  public static final String 
HBASE_CHECKSUM_VERIFICATION =
-930  
"hbase.regionserver.checksum.verify";
-931
-932  public static final String LOCALHOST_IP 
= "127.0.0.1";
+911  public static final String 
CP_HTD_ATTR_INCLUSION_KEY =
+912  
"hbase.coprocessor.classloader.included.classes";
+913
+914  /** The delay when re-trying a socket 
operation in a loop (HBASE-4712) */
+915  public static final int 
SOCKET_RETRY_WAIT_MS = 200;
+916
+917  /** Host name of the local machine */
+918  public static final String LOCALHOST = 
"localhost";
+919
+920  /**
+921   * If this parameter is set to true, 
then hbase will read
+922   * data and then verify checksums. 
Checksum verification
+923   * inside hdfs will be switched off.  
However, if the hbase-checksum
+924   * verification fails, then it will 
switch back to using
+925   * hdfs checksums for verifiying data 
that is being read from storage.
+926   *
+927   * If this parameter is set to false, 
then hbase will not
+928   * verify any checksums, instead it 
will depend on checksum verification
+929   * being done in the hdfs client.
+930   */
+931  public static final String 
HBASE_CHECKSUM_VERIFICATION =
+932  
"hbase.regionserver.checksum.verify";
 933
-934  /** Conf key that enables unflushed WAL 
edits directly being replayed to region servers */
-935  public static final String 
DISTRIBUTED_LOG_REPLAY_KEY = "hbase.master.distributed.log.replay";
-936  public static final boolean 
DEFAULT_DISTRIBUTED_LOG_REPLAY_CONFIG = false;
-937  public static final String 
DISALLOW_WRITES_IN_RECOVERING =
-938  
"hbase.regionserver.disallow.writes.when.recovering";
-939  public static final boolean 
DEFAULT_DISALLOW_WRITES_IN_RECOVERING_CONFIG = false;
-940
-941  public static final String 
REGION_SERVER_HANDLER_COUNT = "hbase.regionserver.handler.count";
-942  public static final int 
DEFAULT_REGION_SERVER_HANDLER_COUNT = 30;
-943
-944  /*
-945   * 
REGION_SERVER_HANDLER_ABORT_ON_ERROR_PERCENT:
-946   * -1  = Disable aborting
-947   * 0   = Abort if even a single 
handler has died
-948   * 0.x = Abort only when this 
percent of handlers have died
-949   * 1   = Abort only all of the 
handers have died
-950   */
-951  public static final String 
REGION_SERVER_HANDLER_ABORT_ON_ERROR_PERCENT =
-952  
"hbase.regionserver.handler.abort.on.error.percent";
-953  public static final double 
DEFAULT_REGION_SERVER_HANDLER_ABORT_ON_ERROR_PERCENT = 0.5;
-954
-955  //High priority handlers to deal with 
admin requests and system table operation requests
-956  public static final String 
REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT =
-957  
"hbase.regionserver.metahandler.count";
-958  public static final int 
DEFAULT_REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT = 20;
-959
-960  public static final String 
REGION_SERVER_REPLICATION_HANDLER_COUNT =
-961  
"hbase.regionserver.replication.handler.count";
-962  public static final int 
DEFAULT_REGION_SERVER_REPLICATION_HANDLER_COUNT = 3;
-963
-964  public static final String 
MASTER_HANDLER_COUNT = "hbase.master.handler.count";
-965  public static final int 
DEFAULT_MASTER_HANLDER_COUNT = 25;
-966
-967  /** Conf key that specifies timeout 
value to wait for a region ready */
-968  public static final String 
LOG_REPLAY_WAIT_REGION_TIMEOUT =
-969  
"hbase.master.log.replay.wait.region.timeout";
-970
-971  /** Conf key for enabling meta 
replication */
-972  public static final String 
USE_META_REPLICAS = 
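
The hbase.regionserver.checksum.verify javadoc quoted above describes an either/or: HBase verifies checksums itself (and switches the HDFS check off), or it leaves verification to the HDFS client. A deployment enabling the HBase-side check would set the flag as below; shown programmatically for brevity, though it normally lives in hbase-site.xml.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConstants;

    public class ChecksumConfigExample {
      public static void main(String[] args) {
        Configuration conf = HBaseConfiguration.create();
        // true: region servers verify checksums themselves and skip the HDFS check;
        // on an hbase-level failure they fall back to HDFS checksums for that read.
        conf.setBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, true);
        System.out.println(HConstants.HBASE_CHECKSUM_VERIFICATION + " = "
            + conf.getBoolean(HConstants.HBASE_CHECKSUM_VERIFICATION, false));
      }
    }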

[17/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html 
b/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
index e252b6c..97174a2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
@@ -258,7 +258,7 @@ the order they are declared.
 
 
 values
-public staticHealthChecker.HealthCheckerExitStatus[]values()
+public staticHealthChecker.HealthCheckerExitStatus[]values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -275,7 +275,7 @@ for (HealthChecker.HealthCheckerExitStatus c : 
HealthChecker.HealthCheckerExitSt
 
 
 valueOf
-public staticHealthChecker.HealthCheckerExitStatusvalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+public staticHealthChecker.HealthCheckerExitStatusvalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html 
b/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html
index 521f7b6..c31803c 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html
@@ -331,7 +331,7 @@ the order they are declared.
 
 
 values
-public staticKeyValue.Type[]values()
+public staticKeyValue.Type[]values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -348,7 +348,7 @@ for (KeyValue.Type c : KeyValue.Type.values())
 
 
 valueOf
-public staticKeyValue.TypevalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+public staticKeyValue.TypevalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html 
b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html
index 75a7f9a..f552c5a 100644
--- a/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html
+++ b/devapidocs/org/apache/hadoop/hbase/MetaTableAccessor.QueryType.html
@@ -275,7 +275,7 @@ the order they are declared.
 
 
 values
-public staticMetaTableAccessor.QueryType[]values()
+public staticMetaTableAccessor.QueryType[]values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -292,7 +292,7 @@ for (MetaTableAccessor.QueryType c : 
MetaTableAccessor.QueryType.values())
 
 
 valueOf
-public staticMetaTableAccessor.QueryTypevalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+public staticMetaTableAccessor.QueryTypevalueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/OffheapKeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/OffheapKeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/OffheapKeyValue.html
index 572a9a3..5583a7e 100644
--- a/devapidocs/org/apache/hadoop/hbase/OffheapKeyValue.html
+++ 

[45/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/org/apache/hadoop/hbase/HConstants.html
index a14c44e..aedb54d 100644
--- a/apidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/org/apache/hadoop/hbase/HConstants.html
@@ -241,587 +241,591 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 CORRUPT_DIR_NAME
 
 
+static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+CP_HTD_ATTR_INCLUSION_KEY
+
+
 static http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_KEY_PATTERN
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_VALUE_PARAM_PATTERN
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Pattern
 CP_HTD_ATTR_VALUE_PATTERN
 
  Pattern that matches a coprocessor specification.
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY
 Configuration key for the name of the alternate cipher 
algorithm for the cluster, a string
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_CIPHERPROVIDER_CONF_KEY
 Configuration key for the crypto algorithm provider, a 
class name
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEY_ALGORITHM_CONF_KEY
 Configuration key for the algorithm used for creating jks 
key, a string
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEYPROVIDER_CONF_KEY
 Configuration key for the crypto key provider, a class 
name
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEYPROVIDER_PARAMETERS_KEY
 Configuration key for the crypto key provider 
parameters
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY
 Configuration key for the name of the alternate master key 
for the cluster, a string
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_MASTERKEY_NAME_CONF_KEY
 Configuration key for the name of the master key for the 
cluster, a string
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_WAL_ALGORITHM_CONF_KEY
 Configuration key for the algorithm to use when encrypting 
the WAL, a string
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_WAL_KEY_NAME_CONF_KEY
 Configuration key for the name of the master WAL encryption 
key for the cluster, a string
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 DATA_FILE_UMASK_KEY
 File permission umask to use when creating hbase data 
files
 
 
-
+
 static int
 DAY_IN_SECONDS
 Seconds in a day, hour and minute
 
 
-
+
 static int
 DEFAULT_BLOCKSIZE
 Default block size for an HFile.
 
 
-
+
 static boolean
 DEFAULT_CLUSTER_DISTRIBUTED
 Default value for cluster distributed mode
 
 
-
+
 static http://docs.oracle.com/javase/7/docs/api/java/util/UUID.html?is-external=true;
 title="class or interface in java.util">UUID
 DEFAULT_CLUSTER_ID
 Default cluster ID, cannot be used to identify a cluster so 
a key with
  this value means it wasn't meant for replication.
 
 
-
+
 static boolean
 DEFAULT_DISALLOW_WRITES_IN_RECOVERING_CONFIG
 
-
+
 static boolean
 DEFAULT_DISTRIBUTED_LOG_REPLAY_CONFIG
 
-
+
 static boolean
 DEFAULT_ENABLE_CLIENT_BACKPRESSURE
 
-
+
 static int
 DEFAULT_HBASE_CLIENT_MAX_PERREGION_TASKS
 Default 
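
Several of the client-side defaults listed here (max total, per-server and per-region tasks) roughly bound how many concurrent mutation tasks a single client keeps in flight; the snippet below just surfaces those constants. The matching hbase.client.max.*.tasks configuration keys that would override them are an assumption here, since only the DEFAULT_* constants appear in this summary.

    import org.apache.hadoop.hbase.HConstants;

    public class ClientTaskLimitDefaults {
      public static void main(String[] args) {
        // Concurrency ceilings the HBase client applies by default.
        System.out.println("max total tasks:      " + HConstants.DEFAULT_HBASE_CLIENT_MAX_TOTAL_TASKS);
        System.out.println("max per-server tasks: " + HConstants.DEFAULT_HBASE_CLIENT_MAX_PERSERVER_TASKS);
        System.out.println("max per-region tasks: " + HConstants.DEFAULT_HBASE_CLIENT_MAX_PERREGION_TASKS);
      }
    }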

[50/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/acid-semantics.html
--
diff --git a/acid-semantics.html b/acid-semantics.html
index a72dbe8..38c7901 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
   Apache HBase (TM) ACID Properties
@@ -600,7 +600,7 @@ under the License. -->
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-04-28
+  Last Published: 
2016-05-06
 
 
 



[15/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
index 2e48c0d..cff4b22 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
@@ -234,37 +234,37 @@
 
 
 int
-BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparatorcomparator,
-Cellkey)
-
-
-int
 DataBlockEncoder.EncodedSeeker.compareKey(CellComparatorcomparator,
 Cellkey)
 Compare the given key against the current key
 
 
+
+int
+BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparatorcomparator,
+Cellkey)
+
 
 DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-DataBlockEncoder.createSeeker(CellComparatorcomparator,
-HFileBlockDecodingContextdecodingCtx)
-Create a HFileBlock seeker which find KeyValues within a 
block.
-
+PrefixKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
+HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparatorcomparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
 HFileBlockDecodingContextdecodingCtx)
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparatorcomparator,
-HFileBlockDecodingContextdecodingCtx)
+DataBlockEncoder.createSeeker(CellComparatorcomparator,
+HFileBlockDecodingContextdecodingCtx)
+Create a HFileBlock seeker which find KeyValues within a 
block.
+
 
 
 DataBlockEncoder.EncodedSeeker
@@ -298,33 +298,33 @@
 
 
 
-protected CellComparator
-CompoundBloomFilterBase.comparator
-Comparator used to compare Bloom filter keys
-
-
-
 private CellComparator
 HFileReaderImpl.comparator
 Key comparator
 
 
-
+
 protected CellComparator
 HFileWriterImpl.comparator
 Key comparator.
 
 
-
-protected CellComparator
-HFile.WriterFactory.comparator
-
 
 private CellComparator
 HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
 Needed doing lookup on blocks.
 
 
+
+protected CellComparator
+CompoundBloomFilterBase.comparator
+Comparator used to compare Bloom filter keys
+
+
+
+protected CellComparator
+HFile.WriterFactory.comparator
+
 
 
 
@@ -499,28 +499,28 @@
 StripeStoreFileManager.cellComparator
 
 
-protected CellComparator
-StripeMultiFileWriter.comparator
+private CellComparator
+HStore.comparator
 
 
 private CellComparator
-ScanInfo.comparator
+Segment.comparator
 
 
-private CellComparator
-Segment.comparator
+protected CellComparator
+StripeStoreFlusher.StripeFlushRequest.comparator
 
 
 private CellComparator
-AbstractMemStore.comparator
+ScanInfo.comparator
 
 
-protected CellComparator
-StripeStoreFlusher.StripeFlushRequest.comparator
+private CellComparator
+AbstractMemStore.comparator
 
 
-private CellComparator
-HStore.comparator
+protected CellComparator
+StripeMultiFileWriter.comparator
 
 
 protected CellComparator
@@ -564,38 +564,38 @@
 HRegion.getCellCompartor()
 
 
-(package private) CellComparator
-StoreFileScanner.getComparator()
+CellComparator
+HStore.getComparator()
 
 
 CellComparator
-ScanInfo.getComparator()
+Store.getComparator()
 
 
-CellComparator
-StoreFileReader.getComparator()
-
-
 protected CellComparator
 Segment.getComparator()
 Returns the Cell comparator used by this segment
 
 
+
+(package private) CellComparator
+StoreFileScanner.getComparator()
+
 
-protected CellComparator
-AbstractMemStore.getComparator()
+CellComparator
+StoreFileReader.getComparator()
 
 
 CellComparator
-HStore.getComparator()
+KeyValueHeap.KVScannerComparator.getComparator()
 
 
 CellComparator
-Store.getComparator()
+ScanInfo.getComparator()
 
 
-CellComparator
-KeyValueHeap.KVScannerComparator.getComparator()
+protected CellComparator
+AbstractMemStore.getComparator()
 
 
 
@@ -608,18 +608,18 @@
 
 
 protected void
-ReversedStoreScanner.checkScanOrder(CellprevKV,
-Cellkv,
-CellComparatorcomparator)
-
-
-protected void
 StoreScanner.checkScanOrder(CellprevKV,
 Cellkv,
 CellComparatorcomparator)
 Check whether scan as expected order
 
 
+
+protected void
+ReversedStoreScanner.checkScanOrder(CellprevKV,
+Cellkv,
+CellComparatorcomparator)
+
 
 static StoreEngine?,?,?,?
 StoreEngine.create(Storestore,
@@ -629,30 +629,30 @@
 
 
 

[22/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/constant-values.html
--
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 0e4b9f3..95c8226 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -808,1763 +808,1770 @@
 "corrupt"
 
 
+
+
+publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+CP_HTD_ATTR_INCLUSION_KEY
+"hbase.coprocessor.classloader.included.classes"
+
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
 "[^=,]+"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
 "[^,]+"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY
 "hbase.crypto.alternate.key.algorithm"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_CIPHERPROVIDER_CONF_KEY
 "hbase.crypto.cipherprovider"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEY_ALGORITHM_CONF_KEY
 "hbase.crypto.key.algorithm"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEYPROVIDER_CONF_KEY
 "hbase.crypto.keyprovider"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEYPROVIDER_PARAMETERS_KEY
 "hbase.crypto.keyprovider.parameters"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY
 "hbase.crypto.master.alternate.key.name"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_MASTERKEY_NAME_CONF_KEY
 "hbase.crypto.master.key.name"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_WAL_ALGORITHM_CONF_KEY
 "hbase.crypto.wal.algorithm"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_WAL_KEY_NAME_CONF_KEY
 "hbase.crypto.wal.key.name"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 DATA_FILE_UMASK_KEY
 "hbase.data.umask"
 
-
+
 
 
 publicstaticfinalint
 DAY_IN_SECONDS
 86400
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_BLOCKSIZE
 65536
 
-
+
 
 
 publicstaticfinalboolean
 DEFAULT_CLUSTER_DISTRIBUTED
 false
 
-
+
 
 
 publicstaticfinalboolean
 DEFAULT_DISALLOW_WRITES_IN_RECOVERING_CONFIG
 false
 
-
+
 
 
 publicstaticfinalboolean
 DEFAULT_DISTRIBUTED_LOG_REPLAY_CONFIG
 false
 
-
+
 
 
 publicstaticfinalboolean
 DEFAULT_ENABLE_CLIENT_BACKPRESSURE
 false
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_MAX_PERREGION_TASKS
 1
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_MAX_PERSERVER_TASKS
 2
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_MAX_TOTAL_TASKS
 100
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT
 120
 
-
+
 
 
 publicstaticfinallong
 DEFAULT_HBASE_CLIENT_PAUSE
 100L
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_RETRIES_NUMBER
 31
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_SCANNER_CACHING
 2147483647
 
-
+
 
 
 publicstaticfinallong
 DEFAULT_HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE
 2097152L
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD
 6
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_META_BLOCK_SIZE
 8192
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_META_SCANNER_CACHING
 100
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_META_VERSIONS
 3
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT
 1
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_RPC_TIMEOUT
 6
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_SERVER_PAUSE
 1000
 
-
+
 
 
 publicstaticfinallong
 DEFAULT_HBASE_SERVER_SCANNER_MAX_RESULT_SIZE
 104857600L
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HEALTH_FAILURE_THRESHOLD
 3
 
-
+
 
 
 publicstaticfinallong
 

[20/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 26ecf1b..bade270 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
This chunk of devapidocs/index-all.html records the following changes. The addCipherOptions(DataTransferProtos.DataTransferEncryptorMessageProto.Builder, List<Object>) entry moved from FanOutOneBlockAsyncDFSOutputSaslHelper.CipherHelper to the renamed CipherOptionHelper interface, and the matching CIPHER_HELPER static field became CIPHER_OPTION_HELPER. The nested enums Admin.CompactType ("Currently, there are only two compact types: NORMAL means do store files compaction; MOB means do mob files compaction.") and Admin.MasterSwitchType were removed from the index. New entries were added for HBaseAdmin.asyncSnapshot(HBaseProtos.SnapshotDescription), CoprocessorRpcUtils.buildServiceCall(byte[], Descriptors.MethodDescriptor, Message) ("Returns a service call instance for the given coprocessor request."), ReplicationSource.ReplicationSourceWorkerThread.calculateTotalSizeOfStoreFiles(WALEdit) ("Calculate the total size of all the store files"), CallRunner.CALL_DROPPED_EXCEPTION, and the new CallDroppedException ("Returned to the clients when their request was discarded due to server being overloaded") with its no-argument and String constructors. checkFileSystem() ("Checks to see if the file system is still accessible.") is now listed under MasterWalManager instead of MasterFileSystem, and the checkZk() javadoc now spells "ZooKeeper" correctly.
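Since CallDroppedException is new in this index, here is a minimal client-side sketch of how a caller might react to it. It assumes, as HBase client exceptions usually do, that the exception reaches the caller as an IOException subtype; the table and row are hypothetical.

    import java.io.IOException;

    import org.apache.hadoop.hbase.CallDroppedException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.client.Table;

    public class DroppedCallRetry {
      /** Retry a Get a few times when the server sheds load by dropping the call. */
      static Result getWithBackoff(Connection conn, byte[] row) throws IOException, InterruptedException {
        try (Table table = conn.getTable(TableName.valueOf("example_table"))) {
          for (int attempt = 1; ; attempt++) {
            try {
              return table.get(new Get(row));
            } catch (CallDroppedException e) {
              // The server was overloaded and discarded the request; back off and retry.
              if (attempt >= 3) throw e;
              Thread.sleep(100L * attempt);
            }
          }
        }
      }
    }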

[37/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/filter/package-tree.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/package-tree.html 
b/apidocs/org/apache/hadoop/hbase/filter/package-tree.html
index f923e69..70499d5 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/package-tree.html
The only change in this hunk is the ordering of the filter enums (FilterList.Operator, RegexStringComparator.EngineType, BitComparator.BitwiseOp, Filter.ReturnCode, CompareFilter.CompareOp) under java.lang.Enum in the generated package tree; there is no API change.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 286a109..8b60247 100644
--- a/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ b/apidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
The remaining hunks only swap the order of generated listings for users of ImmutableBytesWritable (TableRecordReader vs. TableRecordReaderImpl createKey() and next(), TableSnapshotInputFormat vs. MultiTableSnapshotInputFormat getRecordReader(), and GroupingTableMap vs. IdentityTableMap map()); the signatures and descriptions themselves are unchanged.

[51/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/958717f4
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/958717f4
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/958717f4

Branch: refs/heads/asf-site
Commit: 958717f4dd62a9229fbb0c06e59d5a0ea1bde521
Parents: 83cfd2a
Author: jenkins 
Authored: Fri May 6 15:04:11 2016 +
Committer: Misty Stanley-Jones 
Committed: Fri May 6 14:46:36 2016 -0700

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 39682 +
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/allclasses-frame.html   | 8 +-
 apidocs/allclasses-noframe.html | 8 +-
 apidocs/constant-values.html|   509 +-
 apidocs/index-all.html  |   134 +-
 .../hadoop/hbase/CallDroppedException.html  |   271 +
 .../hadoop/hbase/CallQueueTooBigException.html  | 6 +-
 apidocs/org/apache/hadoop/hbase/HConstants.html |   796 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   | 4 +-
 .../hbase/class-use/CallDroppedException.html   |   115 +
 .../org/apache/hadoop/hbase/class-use/Cell.html |   208 +-
 .../hbase/class-use/HTableDescriptor.html   |12 +-
 .../hadoop/hbase/class-use/TableName.html   |   130 +-
 .../hadoop/hbase/client/Admin.CompactType.html  |   328 -
 .../hbase/client/Admin.MasterSwitchType.html|   325 -
 .../org/apache/hadoop/hbase/client/Admin.html   |   496 +-
 .../org/apache/hadoop/hbase/client/Append.html  | 4 +-
 .../hbase/client/BufferedMutatorParams.html | 4 +-
 .../apache/hadoop/hbase/client/CompactType.html |   324 +
 .../hadoop/hbase/client/CompactionState.html|   346 +
 .../apache/hadoop/hbase/client/Connection.html  | 4 +-
 .../apache/hadoop/hbase/client/Consistency.html | 4 +-
 .../apache/hadoop/hbase/client/Durability.html  | 4 +-
 .../hadoop/hbase/client/IsolationLevel.html | 8 +-
 .../hadoop/hbase/client/MasterSwitchType.html   |   322 +
 .../apache/hadoop/hbase/client/Mutation.html| 4 +-
 .../hadoop/hbase/client/RpcRetryingCaller.html  | 2 +-
 .../hbase/client/ScannerTimeoutException.html   | 4 +-
 .../hbase/client/SnapshotDescription.html   |   411 +
 .../hadoop/hbase/client/SnapshotType.html   |   334 +
 .../org/apache/hadoop/hbase/client/Table.html   | 4 +-
 .../client/class-use/Admin.CompactType.html |   212 -
 .../class-use/Admin.MasterSwitchType.html   |   190 -
 .../hadoop/hbase/client/class-use/Admin.html| 6 +-
 .../hbase/client/class-use/CompactType.html |   212 +
 .../hbase/client/class-use/CompactionState.html |   185 +
 .../hbase/client/class-use/Consistency.html |10 +-
 .../hbase/client/class-use/Durability.html  |12 +-
 .../hbase/client/class-use/IsolationLevel.html  |10 +-
 .../client/class-use/MasterSwitchType.html  |   190 +
 .../hadoop/hbase/client/class-use/Mutation.html | 8 +-
 .../hbase/client/class-use/RegionLocator.html   | 6 +-
 .../hadoop/hbase/client/class-use/Result.html   |32 +-
 .../hadoop/hbase/client/class-use/Row.html  | 6 +-
 .../hadoop/hbase/client/class-use/Scan.html | 4 +-
 .../client/class-use/SnapshotDescription.html   |   326 +
 .../hbase/client/class-use/SnapshotType.html|   214 +
 .../hadoop/hbase/client/class-use/Table.html| 4 +-
 .../hadoop/hbase/client/package-frame.html  | 7 +-
 .../hadoop/hbase/client/package-summary.html|32 +-
 .../hadoop/hbase/client/package-tree.html   | 9 +-
 .../apache/hadoop/hbase/client/package-use.html |84 +-
 .../hbase/filter/CompareFilter.CompareOp.html   | 4 +-
 .../filter/class-use/Filter.ReturnCode.html |60 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |48 +-
 .../hadoop/hbase/filter/package-tree.html   | 4 +-
 .../io/class-use/ImmutableBytesWritable.html|52 +-
 .../hadoop/hbase/io/class-use/TimeRange.html| 8 +-
 .../hbase/io/crypto/class-use/Cipher.html   | 8 +-
 .../hbase/io/encoding/DataBlockEncoding.html| 4 +-
 .../hbase/mapreduce/TableInputFormatBase.html   |36 +-
 .../hadoop/hbase/mapreduce/WALPlayer.html   |10 +-
 .../org/apache/hadoop/hbase/package-frame.html  | 1 +
 .../apache/hadoop/hbase/package-summary.html|52 +-
 .../org/apache/hadoop/hbase/package-tree.html   | 1 +
 .../hadoop/hbase/quotas/package-tree.html   | 4 +-
 .../snapshot/CorruptedSnapshotException.html| 8 +-
 .../hbase/snapshot/HBaseSnapshotException.html  |20 +-
 .../snapshot/RestoreSnapshotException.html  |16 +-
 .../snapshot/SnapshotCreationException.html |16 

[46/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/index-all.html
--
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index 9337528..d1fd565 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
The public API index picks up the same reorganization: the nested Admin.CompactType and Admin.MasterSwitchType enums are gone, and Admin.compact(TableName, CompactType), Admin.compact(TableName, byte[], CompactType) and Admin.getCompactionState(TableName, CompactType) now take the top-level CompactType. New index entries cover CallDroppedException ("Returned to the clients when their request was discarded due to server being overloaded") and its constructors, the CompactionState enum ("POJO representing the compaction state"), the CompactType enum ("Currently, there are only two compact types: NORMAL means do store files compaction; MOB means do mob files compaction."), the HConstants field CP_HTD_ATTR_INCLUSION_KEY, and the getCreationTime(), getName() and getOwner() accessors of the client SnapshotDescription. CorruptedSnapshotException(String, SnapshotDescription) replaces the constructor that took HBaseProtos.SnapshotDescription.
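A short sketch of how a client might drive the reworked compaction API. It assumes getCompactionState(TableName, CompactType) returns the new client CompactionState enum (the return type is not shown in this index) and uses a hypothetical table name.

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.CompactType;
    import org.apache.hadoop.hbase.client.CompactionState;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;

    public class MobCompactionExample {
      public static void main(String[] args) throws Exception {
        TableName table = TableName.valueOf("example_table");   // hypothetical table
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          admin.compact(table, CompactType.MOB);                 // request a MOB-files compaction
          // Poll until the requested compaction has finished.
          while (admin.getCompactionState(table, CompactType.MOB) != CompactionState.NONE) {
            Thread.sleep(1000);
          }
        }
      }
    }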
 
 

[08/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 9b69212..8f0559d 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
These hunks only reorder the generated listings of ServerName users (Server vs. SplitLogTask getServerName(), FastFailInterceptorContext vs. AsyncProcess.AsyncRequestFutureImpl.SingleServerRequestRunnable server fields, Admin vs. HBaseAdmin methods such as closeRegion, compactRegionServer, coprocessorService and getOnlineRegions, and the deprecated HConnection entries next to their ConnectionImplementation counterparts for clearCaches, getAdmin and getClient); no signatures or descriptions change.
 

[49/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index c79898f..f4c0f87 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
The reference guide PDF was regenerated: the embedded CreationDate/ModDate moved from 2016-04-28 to 2016-05-06, the Outlines and PageLabels object references were renumbered (3951/4153 to 3961/4163), and the page-tree Kids array was rewritten.

[41/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/client/Append.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Append.html 
b/apidocs/org/apache/hadoop/hbase/client/Append.html
index d6e58b7..28cd5b1 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Append.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Append.html
Only the Prev Class navigation links in the header and footer of the Append page changed.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/client/BufferedMutatorParams.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/BufferedMutatorParams.html 
b/apidocs/org/apache/hadoop/hbase/client/BufferedMutatorParams.html
index 40f70fe..c322b78 100644
--- a/apidocs/org/apache/hadoop/hbase/client/BufferedMutatorParams.html
+++ b/apidocs/org/apache/hadoop/hbase/client/BufferedMutatorParams.html
Only the Next Class navigation links in the header and footer of the BufferedMutatorParams page changed.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/client/CompactType.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/CompactType.html 
b/apidocs/org/apache/hadoop/hbase/client/CompactType.html
new file mode 100644
index 000..a54b368
--- /dev/null
+++ b/apidocs/org/apache/hadoop/hbase/client/CompactType.html
The new page documents the public enum org.apache.hadoop.hbase.client.CompactType (@InterfaceAudience.Public, @InterfaceStability.Unstable), which extends java.lang.Enum<CompactType> and implements Serializable and Comparable<CompactType>. Its description reads: "Currently, there are only two compact types: NORMAL means do store files compaction; MOB means do mob files compaction." The page lists the enum constants MOB and NORMAL, the generated values() and valueOf(String) methods, and the methods inherited from java.lang.Enum.

[10/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
index 1689c47..38c22d9 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
The hunk only swaps the order of the ZKInterProcessReadWriteLock.handler and ZKInterProcessLockBase.handler field listings.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
index 4f4468b..e9a6b35 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
The hunk only swaps the order of the ScanQueryMatcher.keepDeletedCells ("whether to return deleted rows") and ScanInfo.keepDeletedCells field listings.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
index 3b95f52..4cec977 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
The org.apache.hadoop.hbase.io.hfile package, together with its private HFileReaderImpl.HFileScannerImpl.ShareableMemoryKeyValue and ShareableMemoryNoTagsKeyValue subclasses, no longer appears among the users of KeyValue, and the DefaultStoreFileManager and StripeStoreFileManager listings for getCandidateFilesForRowKeyBefore(KeyValue) and updateCandidateFilesForRowKeyBefore(...) are reordered.

[36/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.html 
b/apidocs/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.html
index 7fea0b4..b86d9a2 100644
--- a/apidocs/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.html
+++ b/apidocs/org/apache/hadoop/hbase/snapshot/SnapshotCreationException.html
Both SnapshotCreationException(String msg, SnapshotDescription desc) ("Failure to create the specified snapshot") and SnapshotCreationException(String msg, Throwable cause, SnapshotDescription desc) ("Failure to create the specified snapshot due to an external cause") now take the client org.apache.hadoop.hbase.client.SnapshotDescription in place of the generated HBaseProtos.SnapshotDescription; the parameter descriptions (msg, cause, desc) are unchanged.
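For the client-side effect, a minimal sketch built on the long-standing Admin.snapshot(String, TableName) entry point (not part of this diff); with this change the exception describes the failed snapshot through the client POJO rather than the protobuf message. The snapshot and table names are hypothetical.

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.snapshot.SnapshotCreationException;

    public class SnapshotExample {
      public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          try {
            admin.snapshot("example_snapshot", TableName.valueOf("example_table"));
          } catch (SnapshotCreationException e) {
            // The failed snapshot is now described by org.apache.hadoop.hbase.client.SnapshotDescription.
            System.err.println("snapshot failed: " + e.getMessage());
          }
        }
      }
    }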
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.html 
b/apidocs/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.html
index 9f008bc..bf54b46 100644
--- 
a/apidocs/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.html
+++ 
b/apidocs/org/apache/hadoop/hbase/snapshot/SnapshotDoesNotExistException.html
The SnapshotDoesNotExistException(SnapshotDescription desc) constructor ("desc - expected snapshot to find") likewise now takes the client SnapshotDescription instead of the HBaseProtos form.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.html 
b/apidocs/org/apache/hadoop/hbase/snapshot/SnapshotExistsException.html
index 

[44/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html 
b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
index b9fd2a6..2f68093 100644
--- a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
+++ b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
Only the generated anchors of the KeepDeletedCells values() and valueOf(String) method details changed; the method descriptions are identical.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/class-use/CallDroppedException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/class-use/CallDroppedException.html 
b/apidocs/org/apache/hadoop/hbase/class-use/CallDroppedException.html
new file mode 100644
index 000..5a11f7a
--- /dev/null
+++ b/apidocs/org/apache/hadoop/hbase/class-use/CallDroppedException.html
The new page "Uses of Class org.apache.hadoop.hbase.CallDroppedException" is added; apart from the standard navigation chrome and the copyright footer it reports "No usage of org.apache.hadoop.hbase.CallDroppedException".

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 554dfb3..a5333e3 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
The hunks only reorder the generated listings of Cell users (Append.add(Cell) vs. Put.add(Cell), and the setFamilyCellMap overloads of Append, Delete and Increment); the signatures and descriptions are unchanged.
[47/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/constant-values.html
--
diff --git a/apidocs/constant-values.html b/apidocs/constant-values.html
index 820a853..1761045 100644
--- a/apidocs/constant-values.html
+++ b/apidocs/constant-values.html
@@ -569,1763 +569,1770 @@
 "corrupt"
 
 
+
+
+publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+CP_HTD_ATTR_INCLUSION_KEY
+"hbase.coprocessor.classloader.included.classes"
+
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN
 "[^=,]+"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN
 "[^,]+"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_ALTERNATE_KEY_ALGORITHM_CONF_KEY
 "hbase.crypto.alternate.key.algorithm"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_CIPHERPROVIDER_CONF_KEY
 "hbase.crypto.cipherprovider"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEY_ALGORITHM_CONF_KEY
 "hbase.crypto.key.algorithm"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEYPROVIDER_CONF_KEY
 "hbase.crypto.keyprovider"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_KEYPROVIDER_PARAMETERS_KEY
 "hbase.crypto.keyprovider.parameters"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_MASTERKEY_ALTERNATE_NAME_CONF_KEY
 "hbase.crypto.master.alternate.key.name"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_MASTERKEY_NAME_CONF_KEY
 "hbase.crypto.master.key.name"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_WAL_ALGORITHM_CONF_KEY
 "hbase.crypto.wal.algorithm"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 CRYPTO_WAL_KEY_NAME_CONF_KEY
 "hbase.crypto.wal.key.name"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 DATA_FILE_UMASK_KEY
 "hbase.data.umask"
 
-
+
 
 
 publicstaticfinalint
 DAY_IN_SECONDS
 86400
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_BLOCKSIZE
 65536
 
-
+
 
 
 publicstaticfinalboolean
 DEFAULT_CLUSTER_DISTRIBUTED
 false
 
-
+
 
 
 publicstaticfinalboolean
 DEFAULT_DISALLOW_WRITES_IN_RECOVERING_CONFIG
 false
 
-
+
 
 
 publicstaticfinalboolean
 DEFAULT_DISTRIBUTED_LOG_REPLAY_CONFIG
 false
 
-
+
 
 
 publicstaticfinalboolean
 DEFAULT_ENABLE_CLIENT_BACKPRESSURE
 false
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_MAX_PERREGION_TASKS
 1
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_MAX_PERSERVER_TASKS
 2
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_MAX_TOTAL_TASKS
 100
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_OPERATION_TIMEOUT
 120
 
-
+
 
 
 publicstaticfinallong
 DEFAULT_HBASE_CLIENT_PAUSE
 100L
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_RETRIES_NUMBER
 31
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_SCANNER_CACHING
 2147483647
 
-
+
 
 
 publicstaticfinallong
 DEFAULT_HBASE_CLIENT_SCANNER_MAX_RESULT_SIZE
 2097152L
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD
 6
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_META_BLOCK_SIZE
 8192
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_META_SCANNER_CACHING
 100
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_META_VERSIONS
 3
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_RPC_SHORTOPERATION_TIMEOUT
 1
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_RPC_TIMEOUT
 6
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HBASE_SERVER_PAUSE
 1000
 
-
+
 
 
 publicstaticfinallong
 DEFAULT_HBASE_SERVER_SCANNER_MAX_RESULT_SIZE
 104857600L
 
-
+
 
 
 publicstaticfinalint
 DEFAULT_HEALTH_FAILURE_THRESHOLD
 3
 
-
+
 
 
 publicstaticfinallong
 

[12/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
index 3ad7a9a..442ab80 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
The hunks only reorder the generated listings of HRegionLocation users (RegionAdminServiceCallable vs. RegionServerCallable location fields, RegionLocator vs. HRegionLocator getRegionLocation(), and the deprecated HConnection entries next to their ConnectionImplementation counterparts for getRegionLocation, locateRegion and relocateRegion); no signatures change.

[21/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/deprecated-list.html
--
diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html
index 40ec7f0..cb6577a 100644
--- a/devapidocs/deprecated-list.html
+++ b/devapidocs/deprecated-list.html
Two new deprecation entries are added: HBaseAdmin.addColumn(TableName, HColumnDescriptor) and HBaseAdmin.deleteColumn(TableName, byte[]), both marked "Since 2.0. Will be removed in 3.0." with the advice to use HBaseAdmin.addColumnFamily(TableName, HColumnDescriptor) and HBaseAdmin.deleteColumnFamily(TableName, byte[]) instead. The remaining hunks only reorder existing entries (the matching Admin.addColumn/deleteColumn deprecations, Filter vs. FilterBase filterRowKey(byte[], int, int), the HConnection vs. ConnectionImplementation getHTableDescriptor(s) family, HttpServer vs. InfoServer getPort(), and getRegionCachePrefetch, which "always return false since 0.99").
 

[30/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
index be61fef..0a195b3 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.html
@@ -49,1730 +49,1703 @@
 041import 
org.apache.hadoop.hbase.classification.InterfaceStability;
 042import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
 043import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-044import 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
-045import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
-046import 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
-047import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-048import 
org.apache.hadoop.hbase.quotas.QuotaRetriever;
-049import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-050import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
-051import 
org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
-052import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-053import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-054import 
org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
-055import 
org.apache.hadoop.hbase.util.Pair;
-056
-057/**
-058 * The administrative API for HBase. 
Obtain an instance from an {@link Connection#getAdmin()} and
-059 * call {@link #close()} afterwards.
-060 * pAdmin can be used to create, 
drop, list, enable and disable tables, add and drop table
-061 * column families and other 
administrative operations.
-062 *
-063 * @see ConnectionFactory
-064 * @see Connection
-065 * @see Table
-066 * @since 0.99.0
-067 */
-068@InterfaceAudience.Public
-069@InterfaceStability.Evolving
-070public interface Admin extends Abortable, 
Closeable {
-071  int getOperationTimeout();
+044import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
+045import 
org.apache.hadoop.hbase.quotas.QuotaRetriever;
+046import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
+047import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
+048import 
org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
+049import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
+050import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
+051import 
org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
+052import 
org.apache.hadoop.hbase.util.Pair;
+053
+054/**
+055 * The administrative API for HBase. 
Obtain an instance from an {@link Connection#getAdmin()} and
+056 * call {@link #close()} afterwards.
+057 * pAdmin can be used to create, 
drop, list, enable and disable tables, add and drop table
+058 * column families and other 
administrative operations.
+059 *
+060 * @see ConnectionFactory
+061 * @see Connection
+062 * @see Table
+063 * @since 0.99.0
+064 */
+065@InterfaceAudience.Public
+066@InterfaceStability.Evolving
+067public interface Admin extends Abortable, 
Closeable {
+068  int getOperationTimeout();
+069
+070  @Override
+071  void abort(String why, Throwable e);
 072
 073  @Override
-074  void abort(String why, Throwable e);
+074  boolean isAborted();
 075
-076  @Override
-077  boolean isAborted();
-078
-079  /**
-080   * @return Connection used by this 
object.
-081   */
-082  Connection getConnection();
-083
-084  /**
-085   * @param tableName Table to check.
-086   * @return True if table exists 
already.
-087   * @throws IOException
-088   */
-089  boolean tableExists(final TableName 
tableName) throws IOException;
-090
-091  /**
-092   * List all the userspace tables.
-093   *
-094   * @return - returns an array of 
HTableDescriptors
-095   * @throws IOException if a remote or 
network exception occurs
-096   */
-097  HTableDescriptor[] listTables() throws 
IOException;
-098
-099  /**
-100   * List all the userspace tables 
matching the given pattern.
-101   *
-102   * @param pattern The compiled regular 
expression to match against
-103   * @return - returns an array of 
HTableDescriptors
-104   * @throws IOException if a remote or 
network exception occurs
-105   * @see #listTables()
-106   */
-107  HTableDescriptor[] listTables(Pattern 
pattern) throws IOException;
-108
-109  /**
-110   * List all the userspace tables 
matching the given regular expression.
-111   *
-112   * @param regex The regular expression 
to match against
-113   * @return - returns an array of 
HTableDescriptors
-114   * @throws IOException if a remote or 
network exception occurs
-115   * @see 
#listTables(java.util.regex.Pattern)
-116   */
-117  HTableDescriptor[] listTables(String 
regex) throws IOException;
-118
-119  /**
-120   * List all the tables matching the 
given pattern.
-121   *
-122   * @param pattern The compiled regular 
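
The javadoc at the top of this listing says to obtain an Admin from Connection.getAdmin() and to close it afterwards. A minimal sketch of that lifecycle against the methods shown above (tableExists and listTables); the table name "example" is an assumption:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class AdminLifecycle {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Obtain the Admin from a Connection and close both when done, as the javadoc asks.
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      TableName name = TableName.valueOf("example");      // hypothetical table
      System.out.println("exists: " + admin.tableExists(name));
      for (HTableDescriptor descriptor : admin.listTables()) {
        System.out.println(descriptor.getTableName());
      }
    }
  }
}
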

[04/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Evolving.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Evolving.html
 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Evolving.html
index e36a3ab..4bae063 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Evolving.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Evolving.html
@@ -345,72 +345,78 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 class
-CallQueueTooBigException
+CallDroppedException
+Returned to the clients when their request was discarded 
due to server being overloaded.
+
 
 
+class
+CallQueueTooBigException
+
+
 interface
 Cell
 The unit of storage in HBase consisting of the following 
fields:
  
 
 
-
+
 class
 CellComparator
 Compare two HBase cells.
 
 
-
+
 class
 CellUtil
 Utility methods helpful for slinging Cell instances.
 
 
-
+
 class
 ClusterStatus
 Status information on the HBase cluster.
 
 
-
+
 interface
 Coprocessor
 Coprocessor interface.
 
 
-
+
 class
 HBaseInterfaceAudience
 This class defines constants for different classes of hbase 
limited private apis
 
 
-
+
 class
 HBaseIOException
 All hbase specific IOExceptions should be subclasses of 
HBaseIOException
 
 
-
+
 class
 HColumnDescriptor
 An HColumnDescriptor contains information about a column 
family such as the
  number of versions, compression settings, etc.
 
 
-
+
 class
 HRegionInfo
 Information about a region.
 
 
-
+
 class
 HRegionLocation
 Data structure to hold HRegionInfo and the address for the 
hosting
  HRegionServer.
 
 
-
+
 class
 HTableDescriptor
 HTableDescriptor contains the details about an HBase table  
such as the descriptors of
@@ -419,118 +425,118 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
  when the region split should occur, coprocessors associated with it 
etc...
 
 
-
+
 class
 InvalidFamilyOperationException
 Thrown if a table schema modification request is made
  for an invalid family name.
 
 
-
+
 class
 KeepDeletedCells
 Ways to keep cells marked for delete around.
 
 
-
+
 class
 LocalHBaseCluster
 This class creates a single process HBase cluster.
 
 
-
+
 class
 MultiActionResultTooLarge
 Exception thrown when the result needs to be chunked on the 
server side.
 
 
-
+
 class
 NamespaceDescriptor
 Namespace POJO class.
 
 
-
+
 static class
 NamespaceDescriptor.Builder
 
-
+
 class
 OffheapTag
 This is a Tag implementation in which value is 
backed by an off heap
  http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
 
 
-
+
 class
 ProcedureInfo
 Procedure information
 
 
-
+
 class
 RegionLoad
 Encapsulates per-region load metrics.
 
 
-
+
 class
 RegionTooBusyException
 Thrown by a region server if it will block and wait to 
serve a request.
 
 
-
+
 class
 RetryImmediatelyException
 
-
+
 interface
 Server
 Defines the set of shared functions implemented by HBase 
servers (Masters
  and RegionServers).
 
 
-
+
 class
 ServerLoad
 This class is used for exporting current state of load on a 
RegionServer.
 
 
-
+
 class
 ServerName
 Instance of an HBase ServerName.
 
 
-
+
 interface
 Stoppable
 Implementers are Stoppable.
 
 
-
+
 class
 TableInfoMissingException
 Failed to find .tableinfo file under table dir
 
 
-
+
 class
 TableName
 Immutable POJO class for representing a table name.
 
 
-
+
 interface
 Tag
 Tags are part of cells and helps to add metadata about 
them.
 
 
-
+
 class
 TagType
 
-
+
 class
 ZooKeeperConnectionException
 Thrown if the client can't connect to zookeeper
@@ -584,23 +590,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-static class
-Admin.MasterSwitchType
-
-
 interface
 BufferedMutator
 Used to communicate with a single HBase table similar to Table but 
meant for
  batched, asynchronous puts.
 
 
-
+
 static interface
 BufferedMutator.ExceptionListener
 Listens for asynchronous exceptions on a BufferedMutator.
 
 
-
+
 class
 BufferedMutatorImpl
 
@@ -608,12 +610,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  but meant for batched, potentially asynchronous puts.
 
 
-
+
 class
 BufferedMutatorParams
 Parameters for instantiating a BufferedMutator.
 
 
+
+class
+CompactionState
+POJO representing the compaction state
+
+
 
 interface
 Connection
@@ -680,71 +688,89 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 class
-Mutation
+MasterSwitchType
+Represents the master switch type
+
 
 
 class
+Mutation
+
+
+class
 Operation
 Superclass for any type that maps to a potentially 
application-level 

[29/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/src-html/org/apache/hadoop/hbase/client/CompactType.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/CompactType.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/CompactType.html
new file mode 100644
index 000..dc2e7b2
--- /dev/null
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/CompactType.html
@@ -0,0 +1,107 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/**
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package org.apache.hadoop.hbase.client;
+019import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+020import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+021
+022/**
+023 * Currently, there are only two compact 
types:
+024 * {@code NORMAL} means do store files 
compaction;
+025 * {@code MOB} means do mob files 
compaction.
+026 * */
+027@InterfaceAudience.Public
+028@InterfaceStability.Unstable
+029public enum CompactType {
+030
+031  NORMAL(0),
+032  MOB   (1);
+033
+034  CompactType(int value) {}
+035}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/src-html/org/apache/hadoop/hbase/client/CompactionState.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/CompactionState.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/CompactionState.html
new file mode 100644
index 000..8446ab0
--- /dev/null
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/CompactionState.html
@@ -0,0 +1,101 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/**
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package org.apache.hadoop.hbase.client;
+019import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+020import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+021
+022/**
+023 * POJO representing the compaction 
state
+024 */
+025@InterfaceAudience.Public
+026@InterfaceStability.Evolving
+027public enum CompactionState {
+028  NONE, MINOR, MAJOR, MAJOR_AND_MINOR;
+029}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
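
The two new enums added above, CompactType and CompactionState, replace the nested Admin.CompactType and the protobuf compaction state in the public Admin API (see the Admin diff later in this series). A rough sketch of using them together; the table name is hypothetical and the polling interval is arbitrary:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.CompactType;
import org.apache.hadoop.hbase.client.CompactionState;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class CompactionSketch {
  public static void main(String[] args) throws IOException, InterruptedException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      TableName table = TableName.valueOf("t1"); // hypothetical table

      // Ask for a mob-files compaction instead of the default store-files one.
      admin.compact(table, CompactType.MOB);

      // Poll the client-side CompactionState until the compaction settles.
      while (admin.getCompactionState(table, CompactType.MOB) != CompactionState.NONE) {
        Thread.sleep(1000);
      }
    }
  }
}
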
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/src-html/org/apache/hadoop/hbase/client/MasterSwitchType.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/MasterSwitchType.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/MasterSwitchType.html
new file mode 100644
index 000..8db4db4
--- /dev/null
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/MasterSwitchType.html
@@ -0,0 +1,101 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+Source code
+
+
+
+
+001/**
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache 

[31/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.MasterSwitchType.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.MasterSwitchType.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.MasterSwitchType.html
deleted file mode 100644
index be61fef..000
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.MasterSwitchType.html
+++ /dev/null
@@ -1,1839 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/**
-002 *
-003 * Licensed to the Apache Software 
Foundation (ASF) under one
-004 * or more contributor license 
agreements.  See the NOTICE file
-005 * distributed with this work for 
additional information
-006 * regarding copyright ownership.  The 
ASF licenses this file
-007 * to you under the Apache License, 
Version 2.0 (the
-008 * "License"); you may not use this file 
except in compliance
-009 * with the License.  You may obtain a 
copy of the License at
-010 *
-011 * 
http://www.apache.org/licenses/LICENSE-2.0
-012 *
-013 * Unless required by applicable law or 
agreed to in writing, software
-014 * distributed under the License is 
distributed on an "AS IS" BASIS,
-015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-016 * See the License for the specific 
language governing permissions and
-017 * limitations under the License.
-018 */
-019package org.apache.hadoop.hbase.client;
-020
-021import java.io.Closeable;
-022import java.io.IOException;
-023import java.util.List;
-024import java.util.Map;
-025import java.util.concurrent.Future;
-026import java.util.regex.Pattern;
-027
-028import 
org.apache.hadoop.conf.Configuration;
-029import 
org.apache.hadoop.hbase.Abortable;
-030import 
org.apache.hadoop.hbase.ClusterStatus;
-031import 
org.apache.hadoop.hbase.HColumnDescriptor;
-032import 
org.apache.hadoop.hbase.HRegionInfo;
-033import 
org.apache.hadoop.hbase.HTableDescriptor;
-034import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-035import 
org.apache.hadoop.hbase.ProcedureInfo;
-036import 
org.apache.hadoop.hbase.ServerName;
-037import 
org.apache.hadoop.hbase.TableExistsException;
-038import 
org.apache.hadoop.hbase.TableName;
-039import 
org.apache.hadoop.hbase.TableNotFoundException;
-040import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-041import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-042import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-043import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-044import 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
-045import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
-046import 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
-047import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-048import 
org.apache.hadoop.hbase.quotas.QuotaRetriever;
-049import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-050import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
-051import 
org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
-052import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-053import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-054import 
org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
-055import 
org.apache.hadoop.hbase.util.Pair;
-056
-057/**
-058 * The administrative API for HBase. 
Obtain an instance from an {@link Connection#getAdmin()} and
-059 * call {@link #close()} afterwards.
-060 * pAdmin can be used to create, 
drop, list, enable and disable tables, add and drop table
-061 * column families and other 
administrative operations.
-062 *
-063 * @see ConnectionFactory
-064 * @see Connection
-065 * @see Table
-066 * @since 0.99.0
-067 */
-068@InterfaceAudience.Public
-069@InterfaceStability.Evolving
-070public interface Admin extends Abortable, 
Closeable {
-071  int getOperationTimeout();
-072
-073  @Override
-074  void abort(String why, Throwable e);
-075
-076  @Override
-077  boolean isAborted();
-078
-079  /**
-080   * @return Connection used by this 
object.
-081   */
-082  Connection getConnection();
-083
-084  /**
-085   * @param tableName Table to check.
-086   * @return True if table exists 
already.
-087   * @throws IOException
-088   */
-089  boolean tableExists(final TableName 
tableName) throws IOException;
-090
-091  /**
-092   * List all the userspace tables.
-093   *
-094   * @return - returns an array of 
HTableDescriptors
-095   * @throws IOException if a remote or 
network exception occurs
-096   */
-097  HTableDescriptor[] listTables() throws 
IOException;
-098
-099  /**
-100   * List all the userspace tables 
matching the given pattern.
-101   *
-102   * @param pattern The compiled regular 
expression to match against
-103   * @return - returns an array of 
HTableDescriptors
-104   * @throws IOException if a remote or 
network 

[38/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html
new file mode 100644
index 000..f329d7e
--- /dev/null
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/SnapshotType.html
@@ -0,0 +1,214 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+Uses of Class org.apache.hadoop.hbase.client.SnapshotType (Apache HBase 
2.0.0-SNAPSHOT API)
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev
+Next
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+
+
+
+Uses of Classorg.apache.hadoop.hbase.client.SnapshotType
+
+
+
+
+
+Packages that use SnapshotType
+
+Package
+Description
+
+
+
+org.apache.hadoop.hbase.client
+
+Provides HBase Client
+
+
+
+
+
+
+
+
+
+
+Uses of SnapshotType in org.apache.hadoop.hbase.client
+
+Methods in org.apache.hadoop.hbase.client
 that return SnapshotType
+
+Modifier and Type
+Method and Description
+
+
+
+SnapshotType
+SnapshotDescription.getType()
+
+
+static SnapshotType
+SnapshotType.valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+Returns the enum constant of this type with the specified 
name.
+
+
+
+static SnapshotType[]
+SnapshotType.values()
+Returns an array containing the constants of this enum 
type, in
+the order they are declared.
+
+
+
+
+
+Methods in org.apache.hadoop.hbase.client
 with parameters of type SnapshotType
+
+Modifier and Type
+Method and Description
+
+
+
+void
+Admin.snapshot(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringsnapshotName,
+TableNametableName,
+SnapshotTypetype)
+Create typed snapshot of the table.
+
+
+
+
+
+Constructors in org.apache.hadoop.hbase.client
 with parameters of type SnapshotType
+
+Constructor and Description
+
+
+
+SnapshotDescription(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
+  http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringtable,
+  SnapshotTypetype)
+
+
+SnapshotDescription(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
+  http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringtable,
+  SnapshotTypetype,
+  http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">Stringowner)
+
+
+SnapshotDescription(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
+  http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringtable,
+  SnapshotTypetype,
+  http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringowner,
+  longcreationTime,
+  intversion)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev
+Next
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+
+
+Copyright  20072016 http://www.apache.org/;>The Apache Software Foundation. All rights 
reserved.
+
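
The class-use page above shows the new SnapshotType enum flowing through Admin.snapshot and the SnapshotDescription constructors in place of the protobuf types. A sketch of taking a typed snapshot; the snapshot and table names are invented, and FLUSH is assumed here to be one of the enum's constants:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.SnapshotType;

public class TypedSnapshot {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      // FLUSH is assumed to be a valid SnapshotType constant; adjust as needed.
      admin.snapshot("t1-snapshot", TableName.valueOf("t1"), SnapshotType.FLUSH);
    }
  }
}
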
+

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/client/class-use/Table.html
--
diff --git 

[19/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/ByteBufferedKeyOnlyKeyValue.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ByteBufferedKeyOnlyKeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/ByteBufferedKeyOnlyKeyValue.html
index b5b7abc..17ec53e 100644
--- a/devapidocs/org/apache/hadoop/hbase/ByteBufferedKeyOnlyKeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/ByteBufferedKeyOnlyKeyValue.html
@@ -36,7 +36,7 @@
 
 
 Prev 
Class
-Next 
Class
+Next 
Class
 
 
 Frames
@@ -852,7 +852,7 @@ extends 
 
 Prev 
Class
-Next 
Class
+Next 
Class
 
 
 Frames

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/CallDroppedException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/CallDroppedException.html 
b/devapidocs/org/apache/hadoop/hbase/CallDroppedException.html
new file mode 100644
index 000..a9b6cee
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/CallDroppedException.html
@@ -0,0 +1,271 @@
+http://www.w3.org/TR/html4/loose.dtd;>
+
+
+
+
+CallDroppedException (Apache HBase 2.0.0-SNAPSHOT API)
+
+
+
+
+
+
+JavaScript is disabled on your browser.
+
+
+
+
+
+
+
+
+Overview
+Package
+Class
+Use
+Tree
+Deprecated
+Index
+Help
+
+
+
+
+Prev 
Class
+Next 
Class
+
+
+Frames
+No Frames
+
+
+All Classes
+
+
+
+
+
+
+
+Summary:
+Nested|
+Field|
+Constr|
+Method
+
+
+Detail:
+Field|
+Constr|
+Method
+
+
+
+
+
+
+
+
+org.apache.hadoop.hbase
+Class 
CallDroppedException
+
+
+
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">java.lang.Object
+
+
+http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">java.lang.Throwable
+
+
+http://docs.oracle.com/javase/7/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">java.lang.Exception
+
+
+http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">java.io.IOException
+
+
+org.apache.hadoop.hbase.CallDroppedException
+
+
+
+
+
+
+
+
+
+
+
+
+
+All Implemented Interfaces:
+http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable
+
+
+
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class CallDroppedException
+extends http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+Returned to the clients when their request was discarded 
due to server being overloaded.
+ Clients should retry upon receiving it.
+See Also:Serialized
 Form
+
+
+
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors
+
+Constructor and Description
+
+
+CallDroppedException()
+
+
+CallDroppedException(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">Stringmessage)
+
+
+
+
+
+
+
+
+
+Method Summary
+
+
+
+
+Methods inherited from classjava.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwable
+http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true#addSuppressed(java.lang.Throwable)"
 title="class or interface in java.lang">addSuppressed, http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true#fillInStackTrace()"
 title="class or interface in java.lang">fillInStackTrace, http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true#getCause()"
 title="class or interface in java.lang">getCause, http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true#getLocalizedMessage()"
 title="class or interface in java.lang">getLocalizedMessage, http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true#getMessage()"
 title="class or interface in java.lang">getMessage, http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true#getStackTrace()"
 title="class or inter
 face in java.lang">getStackTrace, http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true#getSuppressed()"
 title="class or interface in java.lang">getSuppressed, http://docs.oracle.com/javase/7/docs/api/java/lang/Throwable.html?is-external=true#initCause(java.lang.Throwable)"
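
CallDroppedException, documented above, tells the client that the server shed the call under load and that a retry is appropriate. A minimal sketch of such a retry loop around a read; the table, row, retry budget, and backoff are all illustrative, and in practice the client's built-in retry machinery may surface the exception wrapped rather than directly:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CallDroppedException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class RetryOnCallDropped {
  public static void main(String[] args) throws IOException, InterruptedException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("t1"))) { // hypothetical table
      Get get = new Get(Bytes.toBytes("row-1"));                       // hypothetical row
      for (int attempt = 1; attempt <= 3; attempt++) {                 // illustrative retry budget
        try {
          Result result = table.get(get);
          System.out.println("cells: " + result.size());
          break;
        } catch (CallDroppedException dropped) {
          // The server dropped this call under load; back off briefly and retry.
          Thread.sleep(100L * attempt);
        }
      }
    }
  }
}
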
 

[06/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 0b8d82d..8b60c73 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -313,11 +313,11 @@ service.
 
 
 private TableName
-MetaTableAccessor.TableVisitorBase.tableName
+HRegionInfo.tableName
 
 
 private TableName
-HRegionInfo.tableName
+MetaTableAccessor.TableVisitorBase.tableName
 
 
 
@@ -759,39 +759,39 @@ service.
 
 
 private TableName
-HRegionLocator.tableName
+HBaseAdmin.TableFuture.tableName
 
 
 private TableName
-BufferedMutatorParams.tableName
+ScannerCallableWithReplicas.tableName
 
 
 private TableName
-ClientScanner.tableName
+BufferedMutatorParams.tableName
 
 
 private TableName
-BufferedMutatorImpl.tableName
+ClientScanner.tableName
 
 
 protected TableName
-RegionServerCallable.tableName
+RegionAdminServiceCallable.tableName
 
 
 private TableName
 TableState.tableName
 
 
-protected TableName
-RpcRetryingCallerWithReadReplicas.tableName
+private TableName
+AsyncProcess.AsyncRequestFutureImpl.tableName
 
 
 private TableName
-AsyncProcess.AsyncRequestFutureImpl.tableName
+HRegionLocator.tableName
 
 
 private TableName
-ScannerCallableWithReplicas.tableName
+BufferedMutatorImpl.tableName
 
 
 private TableName
@@ -799,11 +799,11 @@ service.
 
 
 protected TableName
-RegionAdminServiceCallable.tableName
+RegionServerCallable.tableName
 
 
-private TableName
-HBaseAdmin.TableFuture.tableName
+protected TableName
+RpcRetryingCallerWithReadReplicas.tableName
 
 
 
@@ -843,21 +843,13 @@ service.
 
 
 TableName
-HRegionLocator.getName()
-
-
-TableName
 RegionLocator.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
-
-TableName
-BufferedMutatorImpl.getName()
-
 
 TableName
-HTable.getName()
+HRegionLocator.getName()
 
 
 TableName
@@ -867,6 +859,14 @@ service.
 
 
 TableName
+BufferedMutatorImpl.getName()
+
+
+TableName
+HTable.getName()
+
+
+TableName
 HTableWrapper.getName()
 
 
@@ -874,12 +874,12 @@ service.
 ClientScanner.getTable()
 
 
-TableName
-BufferedMutatorParams.getTableName()
+protected TableName
+HBaseAdmin.TableFuture.getTableName()
 
 
 TableName
-RegionServerCallable.getTableName()
+BufferedMutatorParams.getTableName()
 
 
 TableName
@@ -888,8 +888,8 @@ service.
 
 
 
-protected TableName
-HBaseAdmin.TableFuture.getTableName()
+TableName
+RegionServerCallable.getTableName()
 
 
 private TableName
@@ -897,84 +897,84 @@ service.
 
 
 TableName[]
-Admin.listTableNames()
-List all of the names of userspace tables.
-
+HBaseAdmin.listTableNames()
 
 
 TableName[]
-HConnection.listTableNames()
+ConnectionImplementation.listTableNames()
 Deprecated.
-Use Admin.listTables()
 instead.
+Use Admin.listTableNames()
 instead
 
 
 
 
 TableName[]
-HBaseAdmin.listTableNames()
+Admin.listTableNames()
+List all of the names of userspace tables.
+
 
 
 TableName[]
-ConnectionImplementation.listTableNames()
+HConnection.listTableNames()
 Deprecated.
-Use Admin.listTableNames()
 instead
+Use Admin.listTables()
 instead.
 
 
 
 
 TableName[]
+HBaseAdmin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in 
java.util.regex">Patternpattern)
+
+
+TableName[]
 Admin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
 List all of the names of userspace tables.
 
 
-
+
 TableName[]
-HBaseAdmin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in 
java.util.regex">Patternpattern)
+HBaseAdmin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern,
+booleanincludeSysTables)
 
-
+
 TableName[]
 Admin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern,
 booleanincludeSysTables)
 List all of the names of userspace tables.
 
 
-
+
 TableName[]
-HBaseAdmin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern,
-booleanincludeSysTables)
+HBaseAdmin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">Stringregex)
 
-
+
 TableName[]
 

[32/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.CompactType.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.CompactType.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.CompactType.html
deleted file mode 100644
index be61fef..000
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Admin.CompactType.html
+++ /dev/null
@@ -1,1839 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/**
-002 *
-003 * Licensed to the Apache Software 
Foundation (ASF) under one
-004 * or more contributor license 
agreements.  See the NOTICE file
-005 * distributed with this work for 
additional information
-006 * regarding copyright ownership.  The 
ASF licenses this file
-007 * to you under the Apache License, 
Version 2.0 (the
-008 * "License"); you may not use this file 
except in compliance
-009 * with the License.  You may obtain a 
copy of the License at
-010 *
-011 * 
http://www.apache.org/licenses/LICENSE-2.0
-012 *
-013 * Unless required by applicable law or 
agreed to in writing, software
-014 * distributed under the License is 
distributed on an "AS IS" BASIS,
-015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-016 * See the License for the specific 
language governing permissions and
-017 * limitations under the License.
-018 */
-019package org.apache.hadoop.hbase.client;
-020
-021import java.io.Closeable;
-022import java.io.IOException;
-023import java.util.List;
-024import java.util.Map;
-025import java.util.concurrent.Future;
-026import java.util.regex.Pattern;
-027
-028import 
org.apache.hadoop.conf.Configuration;
-029import 
org.apache.hadoop.hbase.Abortable;
-030import 
org.apache.hadoop.hbase.ClusterStatus;
-031import 
org.apache.hadoop.hbase.HColumnDescriptor;
-032import 
org.apache.hadoop.hbase.HRegionInfo;
-033import 
org.apache.hadoop.hbase.HTableDescriptor;
-034import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-035import 
org.apache.hadoop.hbase.ProcedureInfo;
-036import 
org.apache.hadoop.hbase.ServerName;
-037import 
org.apache.hadoop.hbase.TableExistsException;
-038import 
org.apache.hadoop.hbase.TableName;
-039import 
org.apache.hadoop.hbase.TableNotFoundException;
-040import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-041import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-042import 
org.apache.hadoop.hbase.client.security.SecurityCapability;
-043import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-044import 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
-045import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
-046import 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
-047import 
org.apache.hadoop.hbase.quotas.QuotaFilter;
-048import 
org.apache.hadoop.hbase.quotas.QuotaRetriever;
-049import 
org.apache.hadoop.hbase.quotas.QuotaSettings;
-050import 
org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
-051import 
org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
-052import 
org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
-053import 
org.apache.hadoop.hbase.snapshot.SnapshotCreationException;
-054import 
org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
-055import 
org.apache.hadoop.hbase.util.Pair;
-056
-057/**
-058 * The administrative API for HBase. 
Obtain an instance from an {@link Connection#getAdmin()} and
-059 * call {@link #close()} afterwards.
-060 * pAdmin can be used to create, 
drop, list, enable and disable tables, add and drop table
-061 * column families and other 
administrative operations.
-062 *
-063 * @see ConnectionFactory
-064 * @see Connection
-065 * @see Table
-066 * @since 0.99.0
-067 */
-068@InterfaceAudience.Public
-069@InterfaceStability.Evolving
-070public interface Admin extends Abortable, 
Closeable {
-071  int getOperationTimeout();
-072
-073  @Override
-074  void abort(String why, Throwable e);
-075
-076  @Override
-077  boolean isAborted();
-078
-079  /**
-080   * @return Connection used by this 
object.
-081   */
-082  Connection getConnection();
-083
-084  /**
-085   * @param tableName Table to check.
-086   * @return True if table exists 
already.
-087   * @throws IOException
-088   */
-089  boolean tableExists(final TableName 
tableName) throws IOException;
-090
-091  /**
-092   * List all the userspace tables.
-093   *
-094   * @return - returns an array of 
HTableDescriptors
-095   * @throws IOException if a remote or 
network exception occurs
-096   */
-097  HTableDescriptor[] listTables() throws 
IOException;
-098
-099  /**
-100   * List all the userspace tables 
matching the given pattern.
-101   *
-102   * @param pattern The compiled regular 
expression to match against
-103   * @return - returns an array of 
HTableDescriptors
-104   * @throws IOException if a remote or 
network exception occurs
-105   * 

[03/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/client/Admin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Admin.html 
b/devapidocs/org/apache/hadoop/hbase/client/Admin.html
index 206cabf..7c54828 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Admin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Admin.html
@@ -36,7 +36,7 @@
 
 
 Prev 
Class
-Next 
Class
+Next 
Class
 
 
 Frames
@@ -60,7 +60,7 @@
 
 
 Summary:
-Nested|
+Nested|
 Field|
 Constr|
 Method
@@ -97,7 +97,7 @@
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public interface Admin
+public interface Admin
 extends Abortable, http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true;
 title="class or interface in java.io">Closeable
 The administrative API for HBase. Obtain an instance from 
an Connection.getAdmin()
 and
  call close()
 afterwards.
@@ -114,33 +114,6 @@ extends 
 
 
-
-
-
-
-
-Nested Class Summary
-
-Nested Classes
-
-Modifier and Type
-Interface and Description
-
-
-static class
-Admin.CompactType
-Currently, there are only two compact types:
- NORMAL means do store files compaction;
- MOB means do mob files compaction.
-
-
-
-static class
-Admin.MasterSwitchType
-
-
-
-
 
 
 
@@ -271,24 +244,24 @@ extends 
 void
-compact(TableNametableName,
-  Admin.CompactTypecompactType)
-Compact a table.
+compact(TableNametableName,
+  byte[]columnFamily)
+Compact a column family within a table.
 
 
 
 void
-compact(TableNametableName,
-  byte[]columnFamily)
+compact(TableNametableName,
+  byte[]columnFamily,
+  CompactTypecompactType)
 Compact a column family within a table.
 
 
 
 void
-compact(TableNametableName,
-  byte[]columnFamily,
-  Admin.CompactTypecompactType)
-Compact a column family within a table.
+compact(TableNametableName,
+  CompactTypecompactType)
+Compact a table.
 
 
 
@@ -564,20 +537,20 @@ extends getClusterStatus()
 
 
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState
+CompactionState
 getCompactionState(TableNametableName)
 Get the current compaction state of a table.
 
 
 
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState
-getCompactionState(TableNametableName,
-Admin.CompactTypecompactType)
+CompactionState
+getCompactionState(TableNametableName,
+CompactTypecompactType)
 Get the current compaction state of a table.
 
 
 
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState
+CompactionState
 getCompactionStateForRegion(byte[]regionName)
 Get the current compaction state of region.
 
@@ -703,13 +676,13 @@ extends 
 boolean
-isSnapshotFinished(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionsnapshot)
+isSnapshotFinished(SnapshotDescriptionsnapshot)
 Check the current state of the passed snapshot.
 
 
 
 boolean
-isSplitOrMergeEnabled(Admin.MasterSwitchTypeswitchType)
+isSplitOrMergeEnabled(MasterSwitchTypeswitchType)
 Query the current state of the switch
 
 
@@ -746,19 +719,19 @@ extends 
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
 listSnapshots()
 List completed snapshots.
 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
 listSnapshots(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
 List all the completed snapshots matching the given 
pattern.
 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListSnapshotDescription
 listSnapshots(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex)
 List all the completed snapshots matching the given regular 
expression.
 
@@ -840,7 +813,7 @@ extends 
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
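
The Admin diff above moves isSplitOrMergeEnabled onto the new top-level MasterSwitchType enum. A small sketch of querying the split switch; SPLIT is assumed to be one of the enum's constants:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.MasterSwitchType;

public class SplitSwitchCheck {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      // SPLIT is assumed to be a MasterSwitchType constant; query the current switch state.
      boolean splitEnabled = admin.isSplitOrMergeEnabled(MasterSwitchType.SPLIT);
      System.out.println("region splits enabled: " + splitEnabled);
    }
  }
}
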
 

[24/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 5da591c..94b 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -280,10 +280,10 @@
 Warnings
 Errors
 
-1745
+1756
 0
 0
-12395
+12365
 
 Files
 
@@ -1206,1457 +1206,1457 @@
 org/apache/hadoop/hbase/coprocessor/BaseMasterAndRegionObserver.java
 0
 0
-12
+24
 
-org/apache/hadoop/hbase/coprocessor/BaseMasterObserver.java
-0
-0
-12
-
 org/apache/hadoop/hbase/coprocessor/BaseRegionObserver.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/coprocessor/BaseRegionServerObserver.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/coprocessor/BaseWALObserver.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/coprocessor/BulkLoadObserver.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
 0
 0
 21
-
+
 org/apache/hadoop/hbase/coprocessor/CoprocessorException.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/coprocessor/CoprocessorService.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/EndpointObserver.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/coprocessor/MasterCoprocessorEnvironment.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/MasterObserver.java
 0
 0
-45
-
+7
+
 org/apache/hadoop/hbase/coprocessor/MultiRowMutationEndpoint.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/coprocessor/ObserverContext.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/RegionObserver.java
 0
 0
 62
-
+
 org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java
 0
 0
 27
-
+
 org/apache/hadoop/hbase/coprocessor/SingletonCoprocessorService.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/WALObserver.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/coprocessor/example/BulkDeleteEndpoint.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/coprocessor/example/RowCountEndpoint.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/coprocessor/example/ZooKeeperScanPolicyObserver.java
 0
 0
 22
-
+
 org/apache/hadoop/hbase/coprocessor/package-info.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/errorhandling/ForeignException.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/errorhandling/ForeignExceptionDispatcher.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/errorhandling/TimeoutExceptionInjector.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/exceptions/ClientExceptionsUtil.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/exceptions/FailedSanityCheckException.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/exceptions/LockTimeoutException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/executor/EventHandler.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/executor/EventType.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/executor/ExecutorService.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/executor/ExecutorType.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/filter/BinaryComparator.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/filter/BitComparator.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/filter/ByteArrayComparable.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
 0
 0
 20
-
+
 org/apache/hadoop/hbase/filter/CompareFilter.java
 0
 0
 21
-
+
 org/apache/hadoop/hbase/filter/DependentColumnFilter.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/filter/FamilyFilter.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/filter/Filter.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/filter/FilterBase.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/filter/FilterList.java
 0
 0
 37
-
+
 org/apache/hadoop/hbase/filter/FilterWrapper.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
 0
 0
 16
-
+
 org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/filter/LongComparator.java
 0
 0
 39
-
+
 org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/filter/NullComparator.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/filter/PageFilter.java
 0
 0
 6
-
+
 

[25/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/book.html
--
diff --git a/book.html b/book.html
index 1631876..9d46d8b 100644
--- a/book.html
+++ b/book.html
@@ -777,7 +777,7 @@ Use the jps to be sure that the HMaster and 
HRegionServer processes
 2.3. Intermediate - Pseudo-Distributed Local 
Install
 
 After working your way through quickstart, you 
can re-configure HBase to run in pseudo-distributed mode.
-Pseudo-distributed mode means that HBase still runs completely on a single 
host, but each HBase daemon (HMaster, HRegionServer, and Zookeeper) runs as a 
separate process.
+Pseudo-distributed mode means that HBase still runs completely on a single 
host, but each HBase daemon (HMaster, HRegionServer, and ZooKeeper) runs as a 
separate process.
 By default, unless you configure the hbase.rootdir property as 
described in quickstart, your data is still stored in 
/tmp/.
 In this walk-through, we store your data in HDFS instead, assuming you have 
HDFS available.
 You can skip the HDFS configuration to continue storing your data in the local 
filesystem.
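
As a rough illustration of the two settings that paragraph revolves around, the snippet below builds a Configuration with pseudo-distributed values. The namenode address is an assumption, and in a real deployment these keys belong in conf/hbase-site.xml rather than in code:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class PseudoDistributedConf {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Run each daemon as its own process instead of one standalone JVM.
    conf.set("hbase.cluster.distributed", "true");
    // Store data in HDFS instead of /tmp; the namenode URI is hypothetical.
    conf.set("hbase.rootdir", "hdfs://localhost:8020/hbase");
    System.out.println(conf.get("hbase.rootdir"));
  }
}
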
@@ -959,7 +959,7 @@ The following command starts four additional RegionServers, 
running on sequentia
 
 In reality, you need a fully-distributed configuration to fully test HBase 
and to use it in real-world scenarios.
 In a distributed configuration, the cluster contains multiple nodes, each of 
which runs one or more HBase daemon.
-These include primary and backup Master instances, multiple Zookeeper nodes, 
and multiple RegionServer nodes.
+These include primary and backup Master instances, multiple ZooKeeper nodes, 
and multiple RegionServer nodes.
 
 
 This advanced quickstart adds two more nodes to your cluster.
@@ -1942,7 +1942,7 @@ Set JAVA_HOME to point at the 
root of your java install
 This is the default mode.
 Standalone mode is what is described in the quickstart section.
In standalone mode, HBase does not use HDFS; it uses the 
local filesystem instead, and it runs all HBase daemons and 
a local ZooKeeper all up in the same JVM.
-Zookeeper binds to a well known port so clients may talk to HBase.
+ZooKeeper binds to a well known port so clients may talk to HBase.
 
 
 
@@ -2000,7 +2000,7 @@ These configuration basics are all demonstrated in 
 Distributed RegionServers
-Typically, your cluster will contain multiple RegionServers all running on 
different servers, as well as primary and backup Master and Zookeeper daemons.
+Typically, your cluster will contain multiple RegionServers all running on 
different servers, as well as primary and backup Master and ZooKeeper daemons.
 The conf/regionservers file on the master server contains a list of 
hosts whose RegionServers are associated with this cluster.
 Each host is on a separate line.
 All hosts listed in this file will have their RegionServer processes started 
and stopped when the master server starts or stops.
@@ -2362,21 +2362,6 @@ Configuration that it is thought rare anyone would 
change can exist only in code
 
 
 
-
-
-hbase.master.catalog.timeout
-
-
-Description
-Timeout value for the Catalog Janitor from the master to META.
-
-
-Default
-60
-
-
-
-
 
 
 hbase.master.infoserver.redirect
@@ -2632,21 +2617,6 @@ Configuration that it is thought rare anyone would 
change can exist only in code
 
 
 
-
-
-hbase.regionserver.catalog.timeout
-
-
-Description
-Timeout value for the Catalog Janitor from the regionserver to META.
-
-
-Default
-60
-
-
-
-
 
 
 hbase.regionserver.dns.interface
@@ -2737,21 +2707,6 @@ Configuration that it is thought rare anyone would 
change can exist only in code
 
 
 
-
-
-zookeeper.znode.rootserver
-
-
-Description
-Path to ZNode holding root region location. This is written by the master 
and read by clients and region servers. If a relative path is given, the parent 
folder will be ${zookeeper.znode.parent}. By default, this means the root 
location is stored at /hbase/root-region-server.
-
-
-Default
-root-region-server
-
-
-
-
 
 
 zookeeper.znode.acl.parent
@@ -3618,9 +3573,9 @@ Configuration that it is thought rare anyone would change 
can exist only in code
 
 
 
-
+
 
-hbase.bucketcache.sizes
+hbase.bucketcache.bucket.sizes
 
 
 Description
@@ -4203,21 +4158,6 @@ Configuration that it is thought rare anyone would 
change can exist only in code
 
 
 
-
-
-hbase.thrift.htablepool.size.max
-
-
-Description
-The upper bound for the table pool used in the Thrift gateways server. 
Since this is per table name, we assume a single table and so with 1000 default 
worker threads max this is set to a matching number. For other workloads this 
number can be adjusted as needed.
-
-
-Default
-1000
-
-
-
-
 
 
 hbase.regionserver.thrift.framed
@@ -4308,36 +4248,6 @@ Configuration that it is thought rare anyone would 
change can exist only in code
 
 
 
-
-
-hbase.metrics.showTableName
-
-
-Description
-Whether to include the prefix "tbl.tablename" in per-column family 

[43/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
index fac4c18..3264026 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -391,20 +391,20 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
+RegionLocator.getName()
+Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-RegionLocator.getName()
+Table.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
 
 TableName
-Table.getName()
-Gets the fully qualified table name instance of this 
table.
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
 
 
 
@@ -413,18 +413,18 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 TableName[]
-Admin.listTableNames()
-List all of the names of userspace tables.
-
-
-
-TableName[]
 HConnection.listTableNames()
 Deprecated.
 Use Admin.listTables()
 instead.
 
 
 
+
+TableName[]
+Admin.listTableNames()
+List all of the names of userspace tables.
+
+
 
 TableName[]
 Admin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
@@ -523,24 +523,24 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-Admin.compact(TableName tableName,
-  Admin.CompactType compactType)
-Compact a table.
+Admin.compact(TableName tableName,
+  byte[] columnFamily)
+Compact a column family within a table.
 
 
 
 void
-Admin.compact(TableName tableName,
-  byte[] columnFamily)
+Admin.compact(TableName tableName,
+  byte[] columnFamily,
+  CompactType compactType)
 Compact a column family within a table.
 
 
 
 void
-Admin.compact(TableName tableName,
-  byte[] columnFamily,
-  Admin.CompactType compactType)
-Compact a column family within a table.
+Admin.compact(TableName tableName,
+  CompactType compactType)
+Compact a table.
 
 
 
@@ -619,15 +619,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState
+CompactionState
 Admin.getCompactionState(TableNametableName)
 Get the current compaction state of a table.
 
 
 
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState
-Admin.getCompactionState(TableNametableName,
-Admin.CompactTypecompactType)
+CompactionState
+Admin.getCompactionState(TableNametableName,
+CompactTypecompactType)
 Get the current compaction state of a table.
 
 
@@ -668,38 +668,31 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 RegionLocator
-Connection.getRegionLocator(TableNametableName)
+HConnection.getRegionLocator(TableNametableName)
+Deprecated.
 Retrieve a RegionLocator implementation to inspect region 
information on a table.
 
 
 
 RegionLocator
-HConnection.getRegionLocator(TableNametableName)
-Deprecated.
+Connection.getRegionLocator(TableNametableName)
 Retrieve a RegionLocator implementation to inspect region 
information on a table.
 
 
 
-Table
-Connection.getTable(TableNametableName)
-Retrieve a Table implementation for accessing a table.
-
-
-
 org.apache.hadoop.hbase.client.HTableInterface
 HConnection.getTable(TableNametableName)
 Deprecated.
 Retrieve an HTableInterface implementation for access to a 
table.
 
 
-
+
 Table
-Connection.getTable(TableName tableName, ExecutorService pool)
+Connection.getTable(TableName tableName)
 Retrieve a Table implementation for accessing a table.
 
 
-
+
 org.apache.hadoop.hbase.client.HTableInterface
 HConnection.getTable(TableName tableName, ExecutorService pool)
@@ -707,6 +700,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Retrieve an HTableInterface implementation for access to a 
table.
 
 
+
+Table
+Connection.getTable(TableNametableName,
+

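The reshuffled listing above still documents the same client entry points (Table.getName(), Connection.getTable(TableName), Admin.listTableNames()). A minimal usage sketch, assuming a standard ConnectionFactory setup; the table name is a placeholder and error handling is kept to a try-with-resources block.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;

// Illustrative: obtain a Table from a Connection and read back its fully
// qualified name, as described in the method summaries above.
public class GetTableExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("myTable"))) {
      System.out.println("Fully qualified table name: " + table.getName());
    }
  }
}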
[13/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
index cef4c15..d16d0a3 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
@@ -863,19 +863,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-MultiServerCallable.getHRegionInfo()
+ScannerCallableWithReplicas.getHRegionInfo()
 
 
 HRegionInfo
-RegionServerCallable.getHRegionInfo()
+ScannerCallable.getHRegionInfo()
 
 
 HRegionInfo
-ScannerCallableWithReplicas.getHRegionInfo()
+MultiServerCallable.getHRegionInfo()
 
 
 HRegionInfo
-ScannerCallable.getHRegionInfo()
+RegionServerCallable.getHRegionInfo()
 
 
 private HRegionInfo
@@ -905,13 +905,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 List<HRegionInfo>
-Admin.getOnlineRegions(ServerNamesn)
-Get all the online regions on a region server.
-
+HBaseAdmin.getOnlineRegions(ServerNamesn)
 
 
 List<HRegionInfo>
-HBaseAdmin.getOnlineRegions(ServerNamesn)
+Admin.getOnlineRegions(ServerNamesn)
+Get all the online regions on a region server.
+
 
 
 (package private) PairHRegionInfo,ServerName
@@ -919,13 +919,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 List<HRegionInfo>
-Admin.getTableRegions(TableNametableName)
-Get the regions of a given table.
-
+HBaseAdmin.getTableRegions(TableNametableName)
 
 
 List<HRegionInfo>
-HBaseAdmin.getTableRegions(TableNametableName)
+Admin.getTableRegions(TableNametableName)
+Get the regions of a given table.
+
 
 
 
@@ -944,16 +944,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
+HBaseAdmin.closeRegion(ServerNamesn,
+  HRegionInfohri)
+
+
+void
 Admin.closeRegion(ServerNamesn,
   HRegionInfohri)
 Close a region.
 
 
-
-void
-HBaseAdmin.closeRegion(ServerNamesn,
-  HRegionInfohri)
-
 
 private void
 HBaseAdmin.compact(ServerNamesn,
@@ -1118,22 +1118,48 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
+BaseMasterObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx,
+HRegionInforegionInfo)
+
+
+void
 MasterObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx,
 HRegionInforegionInfo)
 Called after the region assignment has been requested.
 
 
+
+void
+BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx,
+HTableDescriptordesc,
+HRegionInfo[]regions)
+
 
 void
-BaseMasterObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx,
-HRegionInforegionInfo)
+BaseMasterObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx,
+HTableDescriptordesc,
+HRegionInfo[]regions)
 
 
 void
+MasterObserver.postCompletedCreateTableAction(ObserverContext<MasterCoprocessorEnvironment> ctx,
+HTableDescriptor desc,
+HRegionInfo[] regions)
+Called after the createTable operation has been requested.
+
+
+
+void
 BaseMasterAndRegionObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx,
   HTableDescriptordesc,
   HRegionInfo[]regions)
 
+
+void
+BaseMasterObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx,
+  HTableDescriptordesc,
+  HRegionInfo[]regions)
+
 
 void
 MasterObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx,
@@ -1144,33 +1170,45 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-BaseMasterObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx,
-  HTableDescriptordesc,
-  HRegionInfo[]regions)

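The hook listed above, postCompletedCreateTableAction, is documented as "Called after the createTable operation has been requested." A hedged sketch of a master coprocessor overriding it; the class name, the logging, and the throws clause are assumptions made for illustration rather than details taken from this page.

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

// Illustrative master observer: log every completed create-table action.
public class CreateTableAuditObserver extends BaseMasterObserver {
  private static final Log LOG = LogFactory.getLog(CreateTableAuditObserver.class);

  @Override
  public void postCompletedCreateTableAction(
      ObserverContext<MasterCoprocessorEnvironment> ctx,
      HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
    // Runs after the create-table procedure has finished on the master.
    LOG.info("Created " + desc.getTableName() + " with " + regions.length + " regions");
  }
}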
[16/51] [partial] hbase-site git commit: Published site at 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5.

2016-05-06 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/958717f4/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 946bca4..c07c93b 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -380,26 +380,19 @@ service.
 
 
 
-Cell
-ShareableMemory.cloneToCell()
-Does a deep copy of the contents to a new memory area and
- returns it in the form of a cell.
-
-
-
 static Cell
 CellUtil.createCell(byte[]row)
 Create a Cell with specific row.
 
 
-
+
 static Cell
 CellUtil.createCell(byte[]row,
 byte[]value)
 Create a Cell with specific row and value.
 
 
-
+
 static Cell
 CellUtil.createCell(byte[]row,
 byte[]family,
@@ -407,7 +400,7 @@ service.
 Create a Cell with specific row.
 
 
-
+
 static Cell
 CellUtil.createCell(byte[]row,
 byte[]family,
@@ -416,7 +409,7 @@ service.
 bytetype,
 byte[]value)
 
-
+
 static Cell
 CellUtil.createCell(byte[]row,
 byte[]family,
@@ -429,7 +422,7 @@ service.
 Marked as audience Private as of 1.2.0.
 
 
-
+
 static Cell
 CellUtil.createCell(byte[]row,
 byte[]family,
@@ -441,7 +434,7 @@ service.
 Marked as audience Private as of 1.2.0.
 
 
-
+
 static Cell
 CellUtil.createCell(byte[]row,
 byte[]family,
@@ -453,7 +446,7 @@ service.
 Marked as audience Private as of 1.2.0.
 
 
-
+
 static Cell
 CellUtil.createCell(byte[]rowArray,
 introwOffset,
@@ -465,7 +458,7 @@ service.
 intqualifierOffset,
 intqualifierLength)
 
-
+
 static Cell
 CellUtil.createFirstDeleteFamilyCellOnRow(byte[]row,
 
byte[]fam)
@@ -474,25 +467,25 @@ service.
  same row and family.
 
 
-
+
 static Cell
 CellUtil.createFirstOnNextRow(Cellcell)
 Create a Cell that is smaller than all other possible Cells 
for the given Cell row's next row.
 
 
-
+
 static Cell
 CellUtil.createFirstOnRow(Cellcell)
 Create a Cell that is smaller than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 CellUtil.createFirstOnRowCol(Cellcell)
 Create a Cell that is smaller than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 CellUtil.createFirstOnRowCol(Cellcell,
   byte[]qArray,
@@ -502,26 +495,26 @@ service.
  passed qualifier.
 
 
-
+
 static Cell
 CellUtil.createFirstOnRowColTS(Cellcell,
   longts)
 Creates the first cell with the row/family/qualifier of 
this cell and the given timestamp.
 
 
-
+
 static Cell
 CellUtil.createLastOnRow(Cellcell)
 Create a Cell that is larger than all other possible Cells 
for the given Cell's row.
 
 
-
+
 static Cell
 CellUtil.createLastOnRowCol(Cellcell)
 Create a Cell that is larger than all other possible Cells 
for the given Cell's rk:cf:q.
 
 
-
+
 Cell
 CellScanner.current()
 
@@ -1260,32 +1253,32 @@ service.
 
 
 
-boolean
-KeyValue.KVComparator.matchingRowColumn(Cellleft,
+static boolean
+CellUtil.matchingRowColumn(Cellleft,
   Cellright)
-Deprecated.
 Compares the row and column of two keyvalues for 
equality
 
 
 
-static boolean
-CellUtil.matchingRowColumn(Cellleft,
+boolean
+KeyValue.KVComparator.matchingRowColumn(Cellleft,
   Cellright)
+Deprecated.
 Compares the row and column of two keyvalues for 
equality
 
 
 
-boolean
-KeyValue.KVComparator.matchingRows(Cellleft,
+static boolean
+CellUtil.matchingRows(Cellleft,
 Cellright)
-Deprecated.
 Compares the row of two keyvalues for equality
 
 
 
-static boolean
-CellUtil.matchingRows(Cellleft,
+boolean
+KeyValue.KVComparator.matchingRows(Cellleft,
 Cellright)
+Deprecated.
 Compares the row of two keyvalues for equality
 
 
@@ -1645,23 +1638,23 @@ service.
 
 
 
-Put
-Put.add(Cellkv)
-Add the specified KeyValue to this Put operation.
-
-
-
 Append
 Append.add(Cellcell)
 Add column and value to this Append operation.
 
 
-
+
 Increment
 Increment.add(Cellcell)
 Add the specified KeyValue to this operation.
 
 
+
+Put
+Put.add(Cellkv)
+Add the specified KeyValue to this Put operation.
+
+
 
 Delete
 Delete.addDeleteMarker(Cellkv)
@@ -1750,13 +1743,13 @@ service.
 booleanpartial)
 
 
-Put
-Put.setFamilyCellMap(NavigableMap<byte[], List<Cell>> map)
-
-
 Append
 

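For readers skimming the reordered CellUtil listing above, the relationship between the static helpers and the deprecated KeyValue.KVComparator methods is easiest to see in code. A small, self-contained sketch; the row keys and values are placeholders.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;

// Illustrative: build two cells on the same row with CellUtil.createCell and
// compare them with the static CellUtil.matchingRows, which the listing above
// shows as the replacement for the deprecated KVComparator equivalents.
public class CellUtilExample {
  public static void main(String[] args) {
    Cell a = CellUtil.createCell(Bytes.toBytes("row1"), Bytes.toBytes("v1"));
    Cell b = CellUtil.createCell(Bytes.toBytes("row1"), Bytes.toBytes("v2"));
    System.out.println(CellUtil.matchingRows(a, b));      // true: same row key
    System.out.println(CellUtil.matchingRowColumn(a, b)); // also compares family/qualifier
  }
}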
[1/2] hbase git commit: HBASE-15745 Refactor RPC classes to better accept async changes

2016-05-06 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 86ca09e0e -> 56358a0fd


http://git-wip-us.apache.org/repos/asf/hbase/blob/56358a0f/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
index fba0373..6ffb579 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RegionServerCoprocessorRpcChannel.java
@@ -36,7 +36,7 @@ import com.google.protobuf.RpcController;
  * @see 
org.apache.hadoop.hbase.client.HBaseAdmin#coprocessorService(ServerName)
  */
 @InterfaceAudience.Private
-public class RegionServerCoprocessorRpcChannel extends CoprocessorRpcChannel {
+public class RegionServerCoprocessorRpcChannel extends 
SyncCoprocessorRpcChannel {
   private static final Log LOG = 
LogFactory.getLog(RegionServerCoprocessorRpcChannel.class);
   private final ClusterConnection connection;
   private final ServerName serverName;

http://git-wip-us.apache.org/repos/asf/hbase/blob/56358a0f/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SyncCoprocessorRpcChannel.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SyncCoprocessorRpcChannel.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SyncCoprocessorRpcChannel.java
new file mode 100644
index 000..af8ddd4
--- /dev/null
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/SyncCoprocessorRpcChannel.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.ipc;
+
+import com.google.protobuf.Descriptors;
+import com.google.protobuf.Message;
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.protobuf.ResponseConverter;
+
+/**
+ * Base class which provides clients with an RPC connection to
+ * call coprocessor endpoint {@link com.google.protobuf.Service}s.
+ * Note that clients should not use this class directly, except through
+ * {@link 
org.apache.hadoop.hbase.client.HTableInterface#coprocessorService(byte[])}.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public abstract class SyncCoprocessorRpcChannel implements 
CoprocessorRpcChannel {
+  private static final Log LOG = 
LogFactory.getLog(SyncCoprocessorRpcChannel.class);
+
+  @Override
+  @InterfaceAudience.Private
+  public void callMethod(Descriptors.MethodDescriptor method,
+ RpcController controller,
+ Message request, Message responsePrototype,
+ RpcCallback callback) {
+Message response = null;
+try {
+  response = callExecService(controller, method, request, 
responsePrototype);
+} catch (IOException ioe) {
+  LOG.warn("Call failed on IOException", ioe);
+  ResponseConverter.setControllerException(controller, ioe);
+}
+if (callback != null) {
+  callback.run(response);
+}
+  }
+
+  @Override
+  @InterfaceAudience.Private
+  public Message callBlockingMethod(Descriptors.MethodDescriptor method,
+RpcController controller,
+Message request, Message responsePrototype)
+  throws ServiceException {
+try {
+  return callExecService(controller, method, request, responsePrototype);
+} catch (IOException ioe) {
+  throw new ServiceException("Error calling method "+method.getFullName(), 
ioe);
+}
+  }
+
+  protected abstract Message callExecService(RpcController controller,
+  

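The new SyncCoprocessorRpcChannel above funnels both callMethod and callBlockingMethod through a single abstract callExecService hook (its declaration is cut off at the end of this hunk). A minimal illustrative subclass is sketched below; the class name, the echoed response, and the exact parameter list of callExecService are assumptions inferred from the calls in callMethod and callBlockingMethod, not copied from the commit.

package org.apache.hadoop.hbase.ipc;

import java.io.IOException;

import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import com.google.protobuf.RpcController;

// Illustrative sketch only: a trivial channel that "executes" a coprocessor
// call by returning the default instance of the response prototype. A real
// subclass (master, region, or region-server channel) would serialize the
// request and send it over the wire instead.
class EchoCoprocessorRpcChannel extends SyncCoprocessorRpcChannel {
  @Override
  protected Message callExecService(RpcController controller,
      Descriptors.MethodDescriptor method, Message request,
      Message responsePrototype) throws IOException {
    return responsePrototype.getDefaultInstanceForType();
  }
}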
[2/2] hbase git commit: HBASE-15745 Refactor RPC classes to better accept async changes

2016-05-06 Thread stack
HBASE-15745 Refactor RPC classes to better accept async changes

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/56358a0f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/56358a0f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/56358a0f

Branch: refs/heads/master
Commit: 56358a0fd3e93b7ba8dc2a9cd8d92d961a1c24a9
Parents: 86ca09e
Author: Jurriaan Mous 
Authored: Sun May 1 15:01:01 2016 +0200
Committer: stack 
Committed: Fri May 6 13:43:45 2016 -0700

--
 .../client/AbstractRegionServerCallable.java| 158 
 .../client/FastFailInterceptorContext.java  |   5 +-
 .../client/NoOpRetryingInterceptorContext.java  |   4 +-
 .../hbase/client/RegionServerCallable.java  | 107 +--
 .../hadoop/hbase/client/RetryingCallable.java   |  34 +-
 .../hbase/client/RetryingCallableBase.java  |  60 ++
 .../RetryingCallerInterceptorContext.java   |   4 +-
 .../hadoop/hbase/ipc/AsyncRpcChannel.java   | 700 +
 .../hadoop/hbase/ipc/AsyncRpcChannelImpl.java   | 743 +++
 .../apache/hadoop/hbase/ipc/AsyncRpcClient.java |   8 +-
 .../hbase/ipc/AsyncServerResponseHandler.java   |   4 +-
 .../hadoop/hbase/ipc/CoprocessorRpcChannel.java |  64 +-
 .../hbase/ipc/MasterCoprocessorRpcChannel.java  |   2 +-
 .../hbase/ipc/RegionCoprocessorRpcChannel.java  |  12 +-
 .../ipc/RegionServerCoprocessorRpcChannel.java  |   2 +-
 .../hbase/ipc/SyncCoprocessorRpcChannel.java|  79 ++
 16 files changed, 1106 insertions(+), 880 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/56358a0f/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AbstractRegionServerCallable.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AbstractRegionServerCallable.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AbstractRegionServerCallable.java
new file mode 100644
index 000..ee9a781
--- /dev/null
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AbstractRegionServerCallable.java
@@ -0,0 +1,158 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.client;
+
+import java.io.IOException;
+import java.net.ConnectException;
+import java.net.SocketTimeoutException;
+
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.NotServingRegionException;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.exceptions.RegionMovedException;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * Implementations call a RegionServer.
+ * Passed to a {@link RpcRetryingCaller} so we retry on fail.
+ * TODO: this class is actually tied to one region, because most of the paths 
make use of
+ *   the regioninfo part of location when building requests. The only 
reason it works for
+ *   multi-region requests (e.g. batch) is that they happen to not use the 
region parts.
+ *   This could be done cleaner (e.g. having a generic parameter and 2 
derived classes,
+ *   RegionCallable and actual RegionServerCallable with ServerName.
+ * @param  the class that the ServerCallable handles
+ */
+@InterfaceAudience.Private
+abstract class AbstractRegionServerCallable implements RetryingCallableBase 
{
+  protected final Connection connection;
+  protected final TableName tableName;
+  protected final byte[] row;
+  protected HRegionLocation location;
+
+  protected final static int MIN_WAIT_DEAD_SERVER = 1;
+
+  /**
+   * @param connection Connection to use.
+   * @param tableName Table name to which row belongs.
+   * @param row The row we want in tableName.
+   */
+  public AbstractRegionServerCallable(Connection connection, TableName 
tableName, byte[] row) {
+this.connection = 

hbase git commit: HBASE-15773 Improvements to CellCounter job

2016-05-06 Thread garyh
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 5ef9d4752 -> 805746150


HBASE-15773 Improvements to CellCounter job


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/80574615
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/80574615
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/80574615

Branch: refs/heads/branch-1.3
Commit: 80574615085c9ea1a722546b86e22525e3232bd5
Parents: 5ef9d47
Author: Gary Helmling 
Authored: Thu May 5 12:40:47 2016 -0700
Committer: Gary Helmling 
Committed: Fri May 6 11:19:17 2016 -0700

--
 .../hadoop/hbase/mapreduce/CellCounter.java | 124 ---
 .../hbase/mapreduce/TableInputFormat.java   |  88 +++--
 2 files changed, 129 insertions(+), 83 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/80574615/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
index b67932f..09290fd 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
@@ -88,7 +88,30 @@ public class CellCounter {
  * Counter enumeration to count the actual rows.
  */
 public static enum Counters {
-  ROWS
+  ROWS,
+  CELLS
+}
+
+private Configuration conf;
+private String separator;
+
+// state of current row, family, column needs to persist across map() 
invocations
+// in order to properly handle scanner batching, where a single qualifier 
may have too
+// many versions for a single map() call
+private byte[] lastRow;
+private String currentRowKey;
+byte[] currentFamily = null;
+String currentFamilyName = null;
+byte[] currentQualifier = null;
+// family + qualifier
+String currentQualifierName = null;
+// rowkey + family + qualifier
+String currentRowQualifierName = null;
+
+@Override
+protected void setup(Context context) throws IOException, 
InterruptedException {
+  conf = context.getConfiguration();
+  separator = conf.get("ReportSeparator",":");
 }
 
 /**
@@ -108,48 +131,45 @@ public class CellCounter {
 throws IOException {
   Preconditions.checkState(values != null,
   "values passed to the map is null");
-  String currentFamilyName = null;
-  String currentQualifierName = null;
-  String currentRowKey = null;
-  Configuration config = context.getConfiguration();
-  String separator = config.get("ReportSeparator",":");
-  try {
-context.getCounter(Counters.ROWS).increment(1);
-context.write(new Text("Total ROWS"), new IntWritable(1));
 
-for (Cell value : values.listCells()) {
-  currentRowKey = Bytes.toStringBinary(CellUtil.cloneRow(value));
-  String thisRowFamilyName = 
Bytes.toStringBinary(CellUtil.cloneFamily(value));
-  if (!thisRowFamilyName.equals(currentFamilyName)) {
-currentFamilyName = thisRowFamilyName;
-context.getCounter("CF", thisRowFamilyName).increment(1);
-if (1 == context.getCounter("CF", thisRowFamilyName).getValue()) {
-  context.write(new Text("Total Families Across all Rows"), new 
IntWritable(1));
-  context.write(new Text(thisRowFamilyName), new IntWritable(1));
+  try {
+byte[] currentRow = values.getRow();
+if (lastRow == null || !Bytes.equals(lastRow, currentRow)) {
+  lastRow = currentRow;
+  currentRowKey = Bytes.toStringBinary(currentRow);
+  currentFamily = null;
+  currentQualifier = null;
+  context.getCounter(Counters.ROWS).increment(1);
+  context.write(new Text("Total ROWS"), new IntWritable(1));
+}
+if (!values.isEmpty()) {
+  int cellCount = 0;
+  for (Cell value : values.listCells()) {
+cellCount++;
+if (currentFamily == null || !CellUtil.matchingFamily(value, 
currentFamily)) {
+  currentFamily = CellUtil.cloneFamily(value);
+  currentFamilyName = Bytes.toStringBinary(currentFamily);
+  currentQualifier = null;
+  context.getCounter("CF", currentFamilyName).increment(1);
+  if (1 == context.getCounter("CF", currentFamilyName).getValue()) 
{
+context.write(new Text("Total Families Across all Rows"), new 
IntWritable(1));
+context.write(new Text(currentFamily), new IntWritable(1));
+  }
   

hbase git commit: HBASE-15773 Improvements to CellCounter job

2016-05-06 Thread garyh
Repository: hbase
Updated Branches:
  refs/heads/branch-1 0964884b9 -> e44c36035


HBASE-15773 Improvements to CellCounter job


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e44c3603
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e44c3603
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e44c3603

Branch: refs/heads/branch-1
Commit: e44c3603504972d00a3be9a6d6cd296bd5734b87
Parents: 0964884
Author: Gary Helmling 
Authored: Thu May 5 12:40:47 2016 -0700
Committer: Gary Helmling 
Committed: Fri May 6 11:18:24 2016 -0700

--
 .../hadoop/hbase/mapreduce/CellCounter.java | 124 ---
 .../hbase/mapreduce/TableInputFormat.java   |  88 +++--
 2 files changed, 129 insertions(+), 83 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e44c3603/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
index b67932f..09290fd 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
@@ -88,7 +88,30 @@ public class CellCounter {
  * Counter enumeration to count the actual rows.
  */
 public static enum Counters {
-  ROWS
+  ROWS,
+  CELLS
+}
+
+private Configuration conf;
+private String separator;
+
+// state of current row, family, column needs to persist across map() 
invocations
+// in order to properly handle scanner batching, where a single qualifier 
may have too
+// many versions for a single map() call
+private byte[] lastRow;
+private String currentRowKey;
+byte[] currentFamily = null;
+String currentFamilyName = null;
+byte[] currentQualifier = null;
+// family + qualifier
+String currentQualifierName = null;
+// rowkey + family + qualifier
+String currentRowQualifierName = null;
+
+@Override
+protected void setup(Context context) throws IOException, 
InterruptedException {
+  conf = context.getConfiguration();
+  separator = conf.get("ReportSeparator",":");
 }
 
 /**
@@ -108,48 +131,45 @@ public class CellCounter {
 throws IOException {
   Preconditions.checkState(values != null,
   "values passed to the map is null");
-  String currentFamilyName = null;
-  String currentQualifierName = null;
-  String currentRowKey = null;
-  Configuration config = context.getConfiguration();
-  String separator = config.get("ReportSeparator",":");
-  try {
-context.getCounter(Counters.ROWS).increment(1);
-context.write(new Text("Total ROWS"), new IntWritable(1));
 
-for (Cell value : values.listCells()) {
-  currentRowKey = Bytes.toStringBinary(CellUtil.cloneRow(value));
-  String thisRowFamilyName = 
Bytes.toStringBinary(CellUtil.cloneFamily(value));
-  if (!thisRowFamilyName.equals(currentFamilyName)) {
-currentFamilyName = thisRowFamilyName;
-context.getCounter("CF", thisRowFamilyName).increment(1);
-if (1 == context.getCounter("CF", thisRowFamilyName).getValue()) {
-  context.write(new Text("Total Families Across all Rows"), new 
IntWritable(1));
-  context.write(new Text(thisRowFamilyName), new IntWritable(1));
+  try {
+byte[] currentRow = values.getRow();
+if (lastRow == null || !Bytes.equals(lastRow, currentRow)) {
+  lastRow = currentRow;
+  currentRowKey = Bytes.toStringBinary(currentRow);
+  currentFamily = null;
+  currentQualifier = null;
+  context.getCounter(Counters.ROWS).increment(1);
+  context.write(new Text("Total ROWS"), new IntWritable(1));
+}
+if (!values.isEmpty()) {
+  int cellCount = 0;
+  for (Cell value : values.listCells()) {
+cellCount++;
+if (currentFamily == null || !CellUtil.matchingFamily(value, 
currentFamily)) {
+  currentFamily = CellUtil.cloneFamily(value);
+  currentFamilyName = Bytes.toStringBinary(currentFamily);
+  currentQualifier = null;
+  context.getCounter("CF", currentFamilyName).increment(1);
+  if (1 == context.getCounter("CF", currentFamilyName).getValue()) 
{
+context.write(new Text("Total Families Across all Rows"), new 
IntWritable(1));
+context.write(new Text(currentFamily), new IntWritable(1));
+  }
 }

hbase git commit: HBASE-15773 Improvements to CellCounter job

2016-05-06 Thread garyh
Repository: hbase
Updated Branches:
  refs/heads/master d90f0571e -> 86ca09e0e


HBASE-15773 Improvements to CellCounter job


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/86ca09e0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/86ca09e0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/86ca09e0

Branch: refs/heads/master
Commit: 86ca09e0e581b897582124938004e948fe38df3b
Parents: d90f057
Author: Gary Helmling 
Authored: Thu May 5 12:40:47 2016 -0700
Committer: Gary Helmling 
Committed: Fri May 6 11:08:18 2016 -0700

--
 .../hadoop/hbase/mapreduce/CellCounter.java | 117 ---
 .../hbase/mapreduce/TableInputFormat.java   |  88 --
 2 files changed, 125 insertions(+), 80 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/86ca09e0/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
index aaa32bd..73f9b93 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
@@ -92,7 +92,30 @@ public class CellCounter extends Configured implements Tool {
  * Counter enumeration to count the actual rows.
  */
 public static enum Counters {
-  ROWS
+  ROWS,
+  CELLS
+}
+
+private Configuration conf;
+private String separator;
+
+// state of current row, family, column needs to persist across map() 
invocations
+// in order to properly handle scanner batching, where a single qualifier 
may have too
+// many versions for a single map() call
+private byte[] lastRow;
+private String currentRowKey;
+byte[] currentFamily = null;
+String currentFamilyName = null;
+byte[] currentQualifier = null;
+// family + qualifier
+String currentQualifierName = null;
+// rowkey + family + qualifier
+String currentRowQualifierName = null;
+
+@Override
+protected void setup(Context context) throws IOException, 
InterruptedException {
+  conf = context.getConfiguration();
+  separator = conf.get("ReportSeparator",":");
 }
 
 /**
@@ -112,49 +135,45 @@ public class CellCounter extends Configured implements 
Tool {
 throws IOException {
   Preconditions.checkState(values != null,
   "values passed to the map is null");
-  String currentFamilyName = null;
-  String currentQualifierName = null;
-  String currentRowKey = null;
-  Configuration config = context.getConfiguration();
-  String separator = config.get("ReportSeparator",":");
+
   try {
-context.getCounter(Counters.ROWS).increment(1);
-context.write(new Text("Total ROWS"), new IntWritable(1));
-if (values != null && !values.isEmpty()) {
+byte[] currentRow = values.getRow();
+if (lastRow == null || !Bytes.equals(lastRow, currentRow)) {
+  lastRow = currentRow;
+  currentRowKey = Bytes.toStringBinary(currentRow);
+  currentFamily = null;
+  currentQualifier = null;
+  context.getCounter(Counters.ROWS).increment(1);
+  context.write(new Text("Total ROWS"), new IntWritable(1));
+}
+if (!values.isEmpty()) {
+  int cellCount = 0;
   for (Cell value : values.listCells()) {
-currentRowKey = Bytes.toStringBinary(CellUtil.cloneRow(value));
-String thisRowFamilyName = 
Bytes.toStringBinary(CellUtil.cloneFamily(value));
-if (!thisRowFamilyName.equals(currentFamilyName)) {
-  currentFamilyName = thisRowFamilyName;
-  context.getCounter("CF", thisRowFamilyName).increment(1);
-  if (1 == context.getCounter("CF", thisRowFamilyName).getValue()) 
{
+cellCount++;
+if (currentFamily == null || !CellUtil.matchingFamily(value, 
currentFamily)) {
+  currentFamily = CellUtil.cloneFamily(value);
+  currentFamilyName = Bytes.toStringBinary(currentFamily);
+  currentQualifier = null;
+  context.getCounter("CF", currentFamilyName).increment(1);
+  if (1 == context.getCounter("CF", currentFamilyName).getValue()) 
{
 context.write(new Text("Total Families Across all Rows"), new 
IntWritable(1));
-context.write(new Text(thisRowFamilyName), new IntWritable(1));
+context.write(new Text(currentFamily), new IntWritable(1));
   }
 }
-String 

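All three branches above carry the same key change: the CellCounter mapper now keeps lastRow, currentFamily and currentQualifier as fields because, with scanner batching, one HBase row can arrive split across several map() calls. A short sketch of the client-side setting that produces that behaviour; the batch and caching values are placeholders.

import org.apache.hadoop.hbase.client.Scan;

// Illustrative: with a batch size of 10, a row holding 25 cells in one family
// is delivered as three Result objects (10 + 10 + 5) that share a row key, so
// counting rows inside a single map() invocation would overcount. That is why
// the mapper above persists row/family/qualifier state across invocations.
public class BatchedScanExample {
  public static Scan buildScan() {
    Scan scan = new Scan();
    scan.setBatch(10);    // cap cells per Result; wide rows span several Results
    scan.setCaching(500); // rows fetched per RPC; independent of batching
    return scan;
  }
}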
hbase git commit: HBASE-15781 Remove unused TableEventHandler and TotesHRegionInfo

2016-05-06 Thread mbertozzi
Repository: hbase
Updated Branches:
  refs/heads/master 387c7e6b0 -> d90f0571e


HBASE-15781 Remove unused TableEventHandler and TotesHRegionInfo


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d90f0571
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d90f0571
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d90f0571

Branch: refs/heads/master
Commit: d90f0571e66211ef6ce3b97dd14f35ce8c6bdc9d
Parents: 387c7e6
Author: Matteo Bertozzi 
Authored: Fri May 6 10:05:42 2016 -0700
Committer: Matteo Bertozzi 
Committed: Fri May 6 10:05:42 2016 -0700

--
 .../hbase/master/handler/TableEventHandler.java | 259 ---
 .../hbase/master/handler/TotesHRegionInfo.java  |  36 ---
 2 files changed, 295 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d90f0571/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/TableEventHandler.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/TableEventHandler.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/TableEventHandler.java
deleted file mode 100644
index e93ad57..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/TableEventHandler.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.master.handler;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.NavigableMap;
-import java.util.TreeMap;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.CoordinatedStateException;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.InvalidFamilyOperationException;
-import org.apache.hadoop.hbase.MetaTableAccessor;
-import org.apache.hadoop.hbase.Server;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.TableNotDisabledException;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.RegionLocator;
-import org.apache.hadoop.hbase.client.TableState;
-import org.apache.hadoop.hbase.executor.EventHandler;
-import org.apache.hadoop.hbase.executor.EventType;
-import org.apache.hadoop.hbase.master.BulkReOpen;
-import org.apache.hadoop.hbase.master.MasterServices;
-import org.apache.hadoop.hbase.master.TableLockManager.TableLock;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
-/**
- * Base class for performing operations against tables.
- * Checks on whether the process can go forward are done in constructor rather
- * than later on in {@link #process()}.  The idea is to fail fast rather than
- * later down in an async invocation of {@link #process()} (which currently has
- * no means of reporting back issues once started).
- */
-@InterfaceAudience.Private
-public abstract class TableEventHandler extends EventHandler {
-  private static final Log LOG = LogFactory.getLog(TableEventHandler.class);
-  protected final MasterServices masterServices;
-  protected final TableName tableName;
-  protected TableLock tableLock;
-  private boolean isPrepareCalled = false;
-
-  public TableEventHandler(EventType eventType, TableName tableName, Server 
server,
-  MasterServices masterServices) {
-super(server, eventType);
-this.masterServices = masterServices;
-this.tableName = tableName;
-  }
-
-  public TableEventHandler prepare() 

[1/2] hbase git commit: HBASE-15771 Document all the public classes

2016-05-06 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 3b71d0019 -> 7b1786e8f


HBASE-15771 Document all the public classes

Summary: Add on a bunch of documentation around implementation so far.

Test Plan:
make doc
Doxygen has no warnings

Differential Revision: https://reviews.facebook.net/D57753


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bce08a58
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bce08a58
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bce08a58

Branch: refs/heads/HBASE-14850
Commit: bce08a5855e112042e12f815db73aacc6c92abf8
Parents: 3b71d00
Author: Elliott Clark 
Authored: Thu May 5 13:14:20 2016 -0700
Committer: Elliott Clark 
Committed: Fri May 6 09:07:28 2016 -0700

--
 hbase-native-client/BUILDING.md | 18 +-
 .../connection/client-dispatcher.h  | 11 +++-
 .../connection/client-handler.cc| 37 +---
 hbase-native-client/connection/client-handler.h | 36 +++-
 .../connection/connection-factory.h | 18 ++
 .../connection/connection-pool.cc   |  5 +-
 .../connection/connection-pool.h| 44 +-
 hbase-native-client/connection/pipeline.h   | 18 ++
 hbase-native-client/connection/request.h| 32 ++
 hbase-native-client/connection/response.h   | 33 +--
 hbase-native-client/core/BUCK   |  1 -
 hbase-native-client/core/client.h   | 12 
 hbase-native-client/core/connection.cc  | 20 ---
 hbase-native-client/core/connection.h   | 26 -
 hbase-native-client/core/location-cache.cc  | 14 -
 hbase-native-client/core/location-cache.h   | 36 ++--
 hbase-native-client/core/meta-utils.h   | 12 
 hbase-native-client/core/put.cc | 21 ---
 hbase-native-client/core/put.h  | 27 -
 hbase-native-client/core/region-location.h  | 41 +
 hbase-native-client/serde/rpc.cc|  1 -
 hbase-native-client/serde/rpc.h | 61 +++-
 hbase-native-client/serde/zk.h  | 14 +
 hbase-native-client/utils/user-util.h   | 18 ++
 24 files changed, 387 insertions(+), 169 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bce08a58/hbase-native-client/BUILDING.md
--
diff --git a/hbase-native-client/BUILDING.md b/hbase-native-client/BUILDING.md
index 20ef2a0..4c06776 100644
--- a/hbase-native-client/BUILDING.md
+++ b/hbase-native-client/BUILDING.md
@@ -17,7 +17,7 @@ specific language governing permissions and limitations
 under the License.
 -->
 
-#Building HBase native client
+# Building HBase native client
 
 The HBase native client build uses Buck and produces a Linux library.
 
@@ -58,6 +58,20 @@ buck build //core:simple-client
 ```
 
 That will build the library, then build and test everything, then build
-the simple-client binary. Buck will find all modules used, and compile 
+the simple-client binary. Buck will find all modules used, and compile
 them in parallel, caching the results. Output from Buck is in the buck-out
 folder. Generated binaries are in buck-out/gen; logs are in buck-out/logs.
+
+
+# Make
+
+If learning buck isn't your thing there is a Makefile wrapper for your
+convenience.
+
+```
+make help
+make check
+make clean
+make all
+make build
+```

http://git-wip-us.apache.org/repos/asf/hbase/blob/bce08a58/hbase-native-client/connection/client-dispatcher.h
--
diff --git a/hbase-native-client/connection/client-dispatcher.h 
b/hbase-native-client/connection/client-dispatcher.h
index 4435a1b..4bfb35d 100644
--- a/hbase-native-client/connection/client-dispatcher.h
+++ b/hbase-native-client/connection/client-dispatcher.h
@@ -30,17 +30,22 @@
 #include "connection/response.h"
 
 namespace hbase {
+/**
+ * Dispatcher that assigns a call_id and then routes the response back to the 
future.
+ */
 class ClientDispatcher
 : public wangle::ClientDispatcherBase {
 public:
+  /** Create a new ClientDispatcher */
   ClientDispatcher();
-  ~ClientDispatcher() {
-LOG(ERROR) << "Killing ClientDispatcher call_id = " << current_call_id_;
-  }
+  /** Read a response off the pipeline. */
   void read(Context *ctx, Response in) override;
+  /** Take a request as a call and send it down the pipeline. */
   folly::Future operator()(std::unique_ptr arg) override;
+  /** Close the dispatcher and the associated pipeline. */
   folly::Future close(Context *ctx) 

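The new comment on ClientDispatcher ("assigns a call_id and then routes the response back to the future") describes a pattern that is compact enough to show in miniature. The Java sketch below is a generic illustration of that idea under stated assumptions; it is not the native client's wangle-based implementation, and the Q/R type parameters are stand-ins for its Request and Response types.

import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

// Generic call-id dispatcher: every outgoing request gets a fresh id and a
// pending future; when a response tagged with that id arrives, the matching
// future is completed.
public class CallIdDispatcher<Q, R> {
  private final AtomicLong nextCallId = new AtomicLong();
  private final Map<Long, CompletableFuture<R>> pending = new ConcurrentHashMap<>();

  /** Register a request and return the future its response will complete. */
  public CompletableFuture<R> dispatch(Q request) {
    long callId = nextCallId.incrementAndGet();
    CompletableFuture<R> future = new CompletableFuture<>();
    pending.put(callId, future);
    // A real dispatcher would now write (callId, request) down the pipeline.
    return future;
  }

  /** Route a response for the given call id back to its waiting future. */
  public void onResponse(long callId, R response) {
    CompletableFuture<R> future = pending.remove(callId);
    if (future != null) {
      future.complete(response);
    }
  }
}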
[2/2] hbase git commit: HBASE-15761 Add on more server name tests

2016-05-06 Thread eclark
HBASE-15761 Add on more server name tests


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7b1786e8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7b1786e8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7b1786e8

Branch: refs/heads/HBASE-14850
Commit: 7b1786e8fb92c75ca9f37fb388b3ba8c4e9c9709
Parents: bce08a5
Author: Elliott Clark 
Authored: Wed May 4 12:04:18 2016 -0700
Committer: Elliott Clark 
Committed: Fri May 6 09:11:38 2016 -0700

--
 hbase-native-client/serde/server-name-test.cc | 18 ++
 hbase-native-client/serde/server-name.h   |  4 +++-
 2 files changed, 21 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7b1786e8/hbase-native-client/serde/server-name-test.cc
--
diff --git a/hbase-native-client/serde/server-name-test.cc 
b/hbase-native-client/serde/server-name-test.cc
index 35dcbc1..2281fa2 100644
--- a/hbase-native-client/serde/server-name-test.cc
+++ b/hbase-native-client/serde/server-name-test.cc
@@ -30,3 +30,21 @@ TEST(TestServerName, TestMakeServerName) {
   ASSERT_EQ("test", sn.host_name());
   ASSERT_EQ(123, sn.port());
 }
+
+TEST(TestServerName, TestIps) {
+  auto sn = folly::to("127.0.0.1:999");
+  ASSERT_EQ("127.0.0.1", sn.host_name());
+  ASSERT_EQ(999, sn.port());
+}
+
+TEST(TestServerName, TestThrow) {
+  ASSERT_ANY_THROW(folly::to("Ther's no colon here"));
+}
+
+TEST(TestServerName, TestIPV6) {
+  auto sn = folly::to("[1]:123");
+
+  ASSERT_EQ("[1]", sn.host_name());
+  ASSERT_EQ(123, sn.port());
+
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/7b1786e8/hbase-native-client/serde/server-name.h
--
diff --git a/hbase-native-client/serde/server-name.h 
b/hbase-native-client/serde/server-name.h
index bdba087..9844465 100644
--- a/hbase-native-client/serde/server-name.h
+++ b/hbase-native-client/serde/server-name.h
@@ -12,7 +12,9 @@ template  void parseTo(String in, ServerName 
) {
   std::string s = folly::to(in);
 
   auto delim = s.rfind(":");
-  DCHECK(delim != std::string::npos);
+  if (delim == std::string::npos) {
+throw std::runtime_error("Couldn't parse server name");
+  }
   out.set_host_name(s.substr(0, delim));
   // Now keep everything after the : (delim + 1) to the end.
   out.set_port(folly::to(s.substr(delim + 1)));


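The parseTo change above does two things: it splits on the last colon so bracketed IPv6 literals such as "[1]:123" keep their host part, and it throws instead of relying on a debug-only DCHECK when the separator is missing. A Java analogue of the same logic, for illustration only; the class and method names are made up.

// Illustrative Java analogue of the C++ parseTo() above.
public class ServerNameParser {
  static String[] parse(String in) {
    int delim = in.lastIndexOf(':');        // last colon, so "[1]:123" works
    if (delim < 0) {
      throw new IllegalArgumentException("Couldn't parse server name: " + in);
    }
    return new String[] { in.substring(0, delim), in.substring(delim + 1) };
  }

  public static void main(String[] args) {
    String[] hostPort = parse("[1]:123");
    System.out.println(hostPort[0] + " -> " + hostPort[1]); // prints "[1] -> 123"
  }
}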

hbase git commit: HBASE-15782 TestShell fails due to some moved types

2016-05-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 34e9a6ff3 -> 387c7e6b0


HBASE-15782 TestShell fails due to some moved types


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/387c7e6b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/387c7e6b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/387c7e6b

Branch: refs/heads/master
Commit: 387c7e6b083fddeae2a7ebe1fef3546f38ef9fb5
Parents: 34e9a6f
Author: tedyu 
Authored: Fri May 6 06:11:12 2016 -0700
Committer: tedyu 
Committed: Fri May 6 06:11:12 2016 -0700

--
 hbase-shell/src/main/ruby/hbase/admin.rb | 11 +--
 1 file changed, 5 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/387c7e6b/hbase-shell/src/main/ruby/hbase/admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb 
b/hbase-shell/src/main/ruby/hbase/admin.rb
index 88486c0..7ea315f 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -24,7 +24,6 @@ java_import org.apache.hadoop.hbase.util.RegionSplitter
 java_import org.apache.hadoop.hbase.util.Bytes
 java_import org.apache.hadoop.hbase.ServerName
 java_import org.apache.hadoop.hbase.TableName
-java_import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos::SnapshotDescription
 
 # Wrapper for org.apache.hadoop.hbase.client.HBaseAdmin
 
@@ -68,9 +67,9 @@ module Hbase
   end
   compact_type = nil
   if type == "NORMAL"
-compact_type = 
org.apache.hadoop.hbase.client.Admin::CompactType::NORMAL
+compact_type = org.apache.hadoop.hbase.client.CompactType::NORMAL
   elsif type == "MOB"
-compact_type = org.apache.hadoop.hbase.client.Admin::CompactType::MOB
+compact_type = org.apache.hadoop.hbase.client.CompactType::MOB
   else
 raise ArgumentError, "only NORMAL or MOB accepted for type!"
   end
@@ -96,9 +95,9 @@ module Hbase
   end
   compact_type = nil
   if type == "NORMAL"
-compact_type = 
org.apache.hadoop.hbase.client.Admin::CompactType::NORMAL
+compact_type = org.apache.hadoop.hbase.client.CompactType::NORMAL
   elsif type == "MOB"
-compact_type = org.apache.hadoop.hbase.client.Admin::CompactType::MOB
+compact_type = org.apache.hadoop.hbase.client.CompactType::MOB
   else
 raise ArgumentError, "only NORMAL or MOB accepted for type!"
   end
@@ -955,7 +954,7 @@ module Hbase
  args.each do |arg|
 if arg[SKIP_FLUSH] == true
   @admin.snapshot(snapshot_name, table_name,
-  SnapshotDescription::Type::SKIPFLUSH)
+  
org.apache.hadoop.hbase.client.SnapshotType::SKIPFLUSH)
 else
@admin.snapshot(snapshot_name, table_name)
 end


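The Ruby changes above track the relocation of CompactType and SnapshotType into org.apache.hadoop.hbase.client, the same move visible in the apidocs diff earlier in this digest. For comparison, a hedged Java-side sketch of the equivalent MOB compaction request; connection setup is elided, the table name is a placeholder, and this is not code from the commit.

import java.io.IOException;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.CompactType;
import org.apache.hadoop.hbase.client.Connection;

// Illustrative: request a MOB compaction using the relocated CompactType enum.
public class MobCompactExample {
  static void compactMob(Connection connection) throws IOException {
    try (Admin admin = connection.getAdmin()) {
      admin.compact(TableName.valueOf("myTable"), CompactType.MOB);
    }
  }
}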

hbase git commit: HBASE-15669 HFile size is not considered correctly in a replication request

2016-05-06 Thread ashishsinghi
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 f5b57cd04 -> 5ef9d4752


HBASE-15669 HFile size is not considered correctly in a replication request


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5ef9d475
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5ef9d475
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5ef9d475

Branch: refs/heads/branch-1.3
Commit: 5ef9d475281b498a3c97b3842aa15699965109a7
Parents: f5b57cd
Author: Ashish Singhi 
Authored: Fri May 6 17:28:06 2016 +0530
Committer: Ashish Singhi 
Committed: Fri May 6 17:30:22 2016 +0530

--
 .../hadoop/hbase/protobuf/ProtobufUtil.java |  12 +-
 .../hbase/protobuf/generated/WALProtos.java | 159 ---
 hbase-protocol/src/main/protobuf/WAL.proto  |   1 +
 .../hadoop/hbase/regionserver/HRegion.java  |  18 ++-
 .../regionserver/ReplicationSource.java |  44 -
 .../regionserver/TestReplicationSink.java   |   4 +-
 .../TestReplicationSourceManager.java   |  25 ++-
 7 files changed, 229 insertions(+), 34 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5ef9d475/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 82f5f0d..08cf6fa 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -3128,13 +3128,16 @@ public final class ProtobufUtil {
* @param tableName The tableName into which the bulk load is being 
imported into.
* @param encodedRegionName Encoded region name of the region which is being 
bulk loaded.
* @param storeFilesA set of store files of a column family are bulk 
loaded.
+   * @param storeFilesSize  Map of store files and their lengths
* @param bulkloadSeqId sequence ID (by a force flush) used to create 
bulk load hfile
*  name
* @return The WAL log marker for bulk loads.
*/
   public static WALProtos.BulkLoadDescriptor toBulkLoadDescriptor(TableName 
tableName,
-  ByteString encodedRegionName, Map storeFiles, long 
bulkloadSeqId) {
-BulkLoadDescriptor.Builder desc = BulkLoadDescriptor.newBuilder()
+  ByteString encodedRegionName, Map storeFiles,
+  Map storeFilesSize, long bulkloadSeqId) {
+BulkLoadDescriptor.Builder desc =
+BulkLoadDescriptor.newBuilder()
 .setTableName(ProtobufUtil.toProtoTableName(tableName))
 
.setEncodedRegionName(encodedRegionName).setBulkloadSeqNum(bulkloadSeqId);
 
@@ -3143,7 +3146,10 @@ public final class ProtobufUtil {
   .setFamilyName(ByteStringer.wrap(entry.getKey()))
   .setStoreHomeDir(Bytes.toString(entry.getKey())); // relative to 
region
   for (Path path : entry.getValue()) {
-builder.addStoreFile(path.getName());
+String name = path.getName();
+builder.addStoreFile(name);
+Long size = storeFilesSize.get(name) == null ? (Long) 0L : 
storeFilesSize.get(name);
+builder.setStoreFileSize(size);
   }
   desc.addStores(builder);
 }

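To make the new parameter concrete: a hedged sketch of a caller building the two maps that toBulkLoadDescriptor now takes. The generic types (byte[] to List<Path>, String to Long) are inferred from the hunk above, whose type parameters were stripped in transit, and the file name, size and sequence id are placeholders.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.util.Bytes;

import com.google.protobuf.ByteString;

// Illustrative caller of the changed method: one column family "cf" with a
// single bulk-loaded HFile whose length is now carried in storeFilesSize.
public class BulkLoadMarkerExample {
  static WALProtos.BulkLoadDescriptor buildMarker(ByteString encodedRegionName) {
    Map<byte[], List<Path>> storeFiles = new HashMap<>();
    List<Path> cfFiles = new ArrayList<>();
    cfFiles.add(new Path("/bulk/cf/hfile-0001"));
    storeFiles.put(Bytes.toBytes("cf"), cfFiles);

    Map<String, Long> storeFilesSize = new HashMap<>();
    storeFilesSize.put("hfile-0001", 128L * 1024 * 1024); // store file length in bytes

    return ProtobufUtil.toBulkLoadDescriptor(TableName.valueOf("myTable"),
        encodedRegionName, storeFiles, storeFilesSize, 42L /* bulkloadSeqId */);
  }
}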
http://git-wip-us.apache.org/repos/asf/hbase/blob/5ef9d475/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
index d74688e..6252d51 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
@@ -7821,6 +7821,24 @@ public final class WALProtos {
  */
 com.google.protobuf.ByteString
 getStoreFileBytes(int index);
+
+// optional uint64 store_file_size = 4;
+/**
+ * optional uint64 store_file_size = 4;
+ *
+ * 
+ * size of store file
+ * 
+ */
+boolean hasStoreFileSize();
+/**
+ * optional uint64 store_file_size = 4;
+ *
+ * 
+ * size of store file
+ * 
+ */
+long getStoreFileSize();
   }
   /**
* Protobuf type {@code hbase.pb.StoreDescriptor}
@@ -7891,6 +7909,11 @@ public final class WALProtos {
   storeFile_.add(input.readBytes());
   break;
 }
+case 32: {
+   

hbase git commit: HBASE-15669 HFile size is not considered correctly in a replication request

2016-05-06 Thread ashishsinghi
Repository: hbase
Updated Branches:
  refs/heads/branch-1 b9df7978f -> 0964884b9


HBASE-15669 HFile size is not considered correctly in a replication request


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0964884b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0964884b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0964884b

Branch: refs/heads/branch-1
Commit: 0964884b925f251725bcd101f23f77a5d3d829e1
Parents: b9df797
Author: Ashish Singhi 
Authored: Fri May 6 17:28:06 2016 +0530
Committer: Ashish Singhi 
Committed: Fri May 6 17:28:06 2016 +0530

--
 .../hadoop/hbase/protobuf/ProtobufUtil.java |  12 +-
 .../hbase/protobuf/generated/WALProtos.java | 159 ---
 hbase-protocol/src/main/protobuf/WAL.proto  |   1 +
 .../hadoop/hbase/regionserver/HRegion.java  |  18 ++-
 .../regionserver/ReplicationSource.java |  44 -
 .../regionserver/TestReplicationSink.java   |   4 +-
 .../TestReplicationSourceManager.java   |  25 ++-
 7 files changed, 229 insertions(+), 34 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0964884b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 82f5f0d..08cf6fa 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -3128,13 +3128,16 @@ public final class ProtobufUtil {
* @param tableName The tableName into which the bulk load is being 
imported into.
* @param encodedRegionName Encoded region name of the region which is being 
bulk loaded.
* @param storeFilesA set of store files of a column family are bulk 
loaded.
+   * @param storeFilesSize  Map of store files and their lengths
* @param bulkloadSeqId sequence ID (by a force flush) used to create 
bulk load hfile
*  name
* @return The WAL log marker for bulk loads.
*/
   public static WALProtos.BulkLoadDescriptor toBulkLoadDescriptor(TableName 
tableName,
-  ByteString encodedRegionName, Map storeFiles, long 
bulkloadSeqId) {
-BulkLoadDescriptor.Builder desc = BulkLoadDescriptor.newBuilder()
+  ByteString encodedRegionName, Map storeFiles,
+  Map storeFilesSize, long bulkloadSeqId) {
+BulkLoadDescriptor.Builder desc =
+BulkLoadDescriptor.newBuilder()
 .setTableName(ProtobufUtil.toProtoTableName(tableName))
 
.setEncodedRegionName(encodedRegionName).setBulkloadSeqNum(bulkloadSeqId);
 
@@ -3143,7 +3146,10 @@ public final class ProtobufUtil {
   .setFamilyName(ByteStringer.wrap(entry.getKey()))
   .setStoreHomeDir(Bytes.toString(entry.getKey())); // relative to 
region
   for (Path path : entry.getValue()) {
-builder.addStoreFile(path.getName());
+String name = path.getName();
+builder.addStoreFile(name);
+Long size = storeFilesSize.get(name) == null ? (Long) 0L : 
storeFilesSize.get(name);
+builder.setStoreFileSize(size);
   }
   desc.addStores(builder);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/0964884b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
index d74688e..6252d51 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
@@ -7821,6 +7821,24 @@ public final class WALProtos {
  */
 com.google.protobuf.ByteString
 getStoreFileBytes(int index);
+
+// optional uint64 store_file_size = 4;
+/**
+ * optional uint64 store_file_size = 4;
+ *
+ * 
+ * size of store file
+ * 
+ */
+boolean hasStoreFileSize();
+/**
+ * optional uint64 store_file_size = 4;
+ *
+ * 
+ * size of store file
+ * 
+ */
+long getStoreFileSize();
   }
   /**
* Protobuf type {@code hbase.pb.StoreDescriptor}
@@ -7891,6 +7909,11 @@ public final class WALProtos {
   storeFile_.add(input.readBytes());
   break;
 }
+case 32: {
+   
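
Judging from the generated code above, the one-line WAL.proto change in the diffstat is the new optional uint64 store_file_size = 4 field ("size of store file") on StoreDescriptor. A hedged sketch of how the replication side could use it to account for HFile sizes; the ReplicationSource diff is truncated in this message, so the variable names and surrounding loop are assumptions, not the actual code:

    // Given a BulkLoadDescriptor parsed from a WAL entry (variable assumed),
    // total up the HFile sizes it carries; fall back to 0 for markers written
    // before store_file_size existed.
    long totalHFileSize = 0L;
    for (WALProtos.StoreDescriptor store : bulkLoadDescriptor.getStoresList()) {
      totalHFileSize += store.hasStoreFileSize() ? store.getStoreFileSize() : 0L;
    }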

hbase git commit: HBASE-15669 HFile size is not considered correctly in a replication request

2016-05-06 Thread ashishsinghi
Repository: hbase
Updated Branches:
  refs/heads/master bec81b197 -> 34e9a6ff3


HBASE-15669 HFile size is not considered correctly in a replication request


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/34e9a6ff
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/34e9a6ff
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/34e9a6ff

Branch: refs/heads/master
Commit: 34e9a6ff301f40aa3f6ce33ac1b86f9e50fa6694
Parents: bec81b1
Author: Ashish Singhi 
Authored: Fri May 6 17:26:17 2016 +0530
Committer: Ashish Singhi 
Committed: Fri May 6 17:26:17 2016 +0530

--
 .../hadoop/hbase/protobuf/ProtobufUtil.java |  12 +-
 .../hbase/protobuf/generated/WALProtos.java | 159 ---
 hbase-protocol/src/main/protobuf/WAL.proto  |   1 +
 .../hadoop/hbase/regionserver/HRegion.java  |  18 ++-
 .../regionserver/ReplicationSource.java |  44 -
 .../regionserver/TestReplicationSink.java   |   4 +-
 .../TestReplicationSourceManager.java   |  25 ++-
 7 files changed, 229 insertions(+), 34 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/34e9a6ff/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 50a4920..62dfd45 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -3063,13 +3063,16 @@ public final class ProtobufUtil {
* @param tableName The tableName into which the bulk load is being 
imported into.
* @param encodedRegionName Encoded region name of the region which is being 
bulk loaded.
* @param storeFiles A set of store files of a column family are bulk 
loaded.
+   * @param storeFilesSize  Map of store files and their lengths
* @param bulkloadSeqId sequence ID (by a force flush) used to create 
bulk load hfile
*  name
* @return The WAL log marker for bulk loads.
*/
   public static WALProtos.BulkLoadDescriptor toBulkLoadDescriptor(TableName 
tableName,
-  ByteString encodedRegionName, Map<byte[], List<Path>> storeFiles, long 
bulkloadSeqId) {
-BulkLoadDescriptor.Builder desc = BulkLoadDescriptor.newBuilder()
+  ByteString encodedRegionName, Map<byte[], List<Path>> storeFiles,
+  Map<String, Long> storeFilesSize, long bulkloadSeqId) {
+BulkLoadDescriptor.Builder desc =
+BulkLoadDescriptor.newBuilder()
 .setTableName(ProtobufUtil.toProtoTableName(tableName))
 
.setEncodedRegionName(encodedRegionName).setBulkloadSeqNum(bulkloadSeqId);
 
@@ -3078,7 +3081,10 @@ public final class ProtobufUtil {
   .setFamilyName(ByteStringer.wrap(entry.getKey()))
   .setStoreHomeDir(Bytes.toString(entry.getKey())); // relative to 
region
   for (Path path : entry.getValue()) {
-builder.addStoreFile(path.getName());
+String name = path.getName();
+builder.addStoreFile(name);
+Long size = storeFilesSize.get(name) == null ? (Long) 0L : 
storeFilesSize.get(name);
+builder.setStoreFileSize(size);
   }
   desc.addStores(builder);
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/34e9a6ff/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
index d74688e..6252d51 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java
@@ -7821,6 +7821,24 @@ public final class WALProtos {
  */
 com.google.protobuf.ByteString
 getStoreFileBytes(int index);
+
+// optional uint64 store_file_size = 4;
+/**
+ * optional uint64 store_file_size = 4;
+ *
+ * 
+ * size of store file
+ * 
+ */
+boolean hasStoreFileSize();
+/**
+ * optional uint64 store_file_size = 4;
+ *
+ * 
+ * size of store file
+ * 
+ */
+long getStoreFileSize();
   }
   /**
* Protobuf type {@code hbase.pb.StoreDescriptor}
@@ -7891,6 +7909,11 @@ public final class WALProtos {
   storeFile_.add(input.readBytes());
   break;
 }
+case 32: {
+   

hbase git commit: HBASE-15608 Remove PB references from SnapShot related Exceptions (Ram)

2016-05-06 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 6844474a1 -> f5b57cd04


HBASE-15608 Remove PB references from SnapShot related Exceptions (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f5b57cd0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f5b57cd0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f5b57cd0

Branch: refs/heads/branch-1.3
Commit: f5b57cd048fb1ca0adfbf31e8686056956648c9f
Parents: 6844474
Author: Ramkrishna 
Authored: Fri May 6 17:08:32 2016 +0530
Committer: Ramkrishna 
Committed: Fri May 6 17:08:32 2016 +0530

--
 .../src/main/java/org/apache/hadoop/hbase/client/Admin.java   | 1 +
 .../apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java  | 1 +
 .../org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java  | 3 +++
 .../apache/hadoop/hbase/snapshot/RestoreSnapshotException.java| 2 ++
 .../apache/hadoop/hbase/snapshot/SnapshotCreationException.java   | 1 +
 .../hadoop/hbase/snapshot/SnapshotDoesNotExistException.java  | 1 +
 .../org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java | 1 +
 7 files changed, 10 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f5b57cd0/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index c8b91a9..7b1d016 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -1526,6 +1526,7 @@ public interface Admin extends Abortable, Closeable {
* */
   void releaseSplitOrMergeLockAndRollback() throws IOException;
 
+  @Deprecated
   @InterfaceAudience.Public
   @InterfaceStability.Evolving
   public enum MasterSwitchType {

http://git-wip-us.apache.org/repos/asf/hbase/blob/f5b57cd0/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
index 2c6cc35..d29c89c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
@@ -43,6 +43,7 @@ public class CorruptedSnapshotException extends 
HBaseSnapshotException {
* @param message full description of the failure
* @param snapshot snapshot that was expected
*/
+  @Deprecated
   public CorruptedSnapshotException(String message, SnapshotDescription 
snapshot) {
 super(message, snapshot);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/f5b57cd0/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
index 4a28461..cd2f66f 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
@@ -45,6 +45,7 @@ public class HBaseSnapshotException extends 
DoNotRetryIOException {
* @param msg reason why the snapshot failed
* @param desc description of the snapshot that is being failed
*/
+  @Deprecated
   public HBaseSnapshotException(String msg, SnapshotDescription desc) {
 super(msg);
 this.description = desc;
@@ -56,6 +57,7 @@ public class HBaseSnapshotException extends 
DoNotRetryIOException {
* @param cause root cause of the failure
* @param desc description of the snapshot that is being failed
*/
+  @Deprecated
   public HBaseSnapshotException(String msg, Throwable cause, 
SnapshotDescription desc) {
 super(msg, cause);
 this.description = desc;
@@ -71,6 +73,7 @@ public class HBaseSnapshotException extends 
DoNotRetryIOException {
 super(message, e);
   }
 
+  @Deprecated
   public SnapshotDescription getSnapshotDescription() {
 return this.description;
   }
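
The annotations above deprecate the protobuf-typed SnapshotDescription constructors and getter on branch-1.3, mirroring the master change that moves callers to the client-side POJO. A minimal caller sketch, assuming an Admin instance and a commons-logging Log named LOG (neither is part of this diff), showing that callers need not touch the deprecated PB accessor at all:

    try {
      admin.snapshot("snap1", TableName.valueOf("t1"));
    } catch (HBaseSnapshotException e) {
      // getSnapshotDescription() is deprecated here; the exception message plus the
      // requested snapshot name are usually enough for logging or retry decisions.
      LOG.warn("Snapshot 'snap1' failed: " + e.getMessage(), e);
    }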

http://git-wip-us.apache.org/repos/asf/hbase/blob/f5b57cd0/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java
--
diff --git 

hbase git commit: HBASE-15608 Remove PB references from SnapShot related Exceptions (Ram)

2016-05-06 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/branch-1 09d451b1d -> b9df7978f


HBASE-15608 Remove PB references from SnapShot related Exceptions (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b9df7978
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b9df7978
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b9df7978

Branch: refs/heads/branch-1
Commit: b9df7978fc6f0d947996f607db842a3393b04292
Parents: 09d451b
Author: Ramkrishna 
Authored: Fri May 6 17:07:44 2016 +0530
Committer: Ramkrishna 
Committed: Fri May 6 17:07:44 2016 +0530

--
 .../src/main/java/org/apache/hadoop/hbase/client/Admin.java   | 1 +
 .../apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java  | 1 +
 .../org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java  | 3 +++
 .../apache/hadoop/hbase/snapshot/RestoreSnapshotException.java| 2 ++
 .../apache/hadoop/hbase/snapshot/SnapshotCreationException.java   | 1 +
 .../hadoop/hbase/snapshot/SnapshotDoesNotExistException.java  | 1 +
 .../org/apache/hadoop/hbase/snapshot/SnapshotExistsException.java | 1 +
 7 files changed, 10 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b9df7978/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index c8b91a9..7b1d016 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -1526,6 +1526,7 @@ public interface Admin extends Abortable, Closeable {
* */
   void releaseSplitOrMergeLockAndRollback() throws IOException;
 
+  @Deprecated
   @InterfaceAudience.Public
   @InterfaceStability.Evolving
   public enum MasterSwitchType {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b9df7978/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
index 2c6cc35..d29c89c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/CorruptedSnapshotException.java
@@ -43,6 +43,7 @@ public class CorruptedSnapshotException extends 
HBaseSnapshotException {
* @param message full description of the failure
* @param snapshot snapshot that was expected
*/
+  @Deprecated
   public CorruptedSnapshotException(String message, SnapshotDescription 
snapshot) {
 super(message, snapshot);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b9df7978/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
index 4a28461..cd2f66f 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/HBaseSnapshotException.java
@@ -45,6 +45,7 @@ public class HBaseSnapshotException extends 
DoNotRetryIOException {
* @param msg reason why the snapshot failed
* @param desc description of the snapshot that is being failed
*/
+  @Deprecated
   public HBaseSnapshotException(String msg, SnapshotDescription desc) {
 super(msg);
 this.description = desc;
@@ -56,6 +57,7 @@ public class HBaseSnapshotException extends 
DoNotRetryIOException {
* @param cause root cause of the failure
* @param desc description of the snapshot that is being failed
*/
+  @Deprecated
   public HBaseSnapshotException(String msg, Throwable cause, 
SnapshotDescription desc) {
 super(msg, cause);
 this.description = desc;
@@ -71,6 +73,7 @@ public class HBaseSnapshotException extends 
DoNotRetryIOException {
 super(message, e);
   }
 
+  @Deprecated
   public SnapshotDescription getSnapshotDescription() {
 return this.description;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b9df7978/hbase-client/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotException.java
--
diff --git 

hbase git commit: HBASE-15608 Remove PB references from SnapShot related Exceptions (ram)

2016-05-06 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/master 0d252918f -> bec81b197


HBASE-15608 Remove PB references from SnapShot related Exceptions (ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bec81b19
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bec81b19
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bec81b19

Branch: refs/heads/master
Commit: bec81b1977ab3862c424cc0e0768c5d18bda7dee
Parents: 0d25291
Author: Ramkrishna 
Authored: Fri May 6 17:06:25 2016 +0530
Committer: Ramkrishna 
Committed: Fri May 6 17:06:25 2016 +0530

--
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  2 +-
 .../hbase/client/SnapshotDescription.java   |  8 
 .../hadoop/hbase/protobuf/ProtobufUtil.java | 14 +++
 .../snapshot/CorruptedSnapshotException.java|  2 +-
 .../hbase/snapshot/HBaseSnapshotException.java  |  2 +-
 .../snapshot/RestoreSnapshotException.java  |  2 +-
 .../snapshot/SnapshotCreationException.java |  2 +-
 .../snapshot/SnapshotDoesNotExistException.java |  2 +-
 .../hbase/snapshot/SnapshotExistsException.java |  2 +-
 .../procedure/CloneSnapshotProcedure.java   |  3 +-
 .../master/snapshot/MasterSnapshotVerifier.java | 16 +---
 .../hbase/master/snapshot/SnapshotManager.java  | 41 
 .../snapshot/SnapshotDescriptionUtils.java  |  6 ++-
 .../hadoop/hbase/snapshot/SnapshotManifest.java |  6 ++-
 .../hbase/snapshot/SnapshotReferenceUtil.java   | 15 ---
 15 files changed, 84 insertions(+), 39 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bec81b19/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 7371f03..4b0609d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -2333,7 +2333,7 @@ public class HBaseAdmin implements Admin {
 }
 if (!done.getDone()) {
   throw new SnapshotCreationException("Snapshot '" + snapshot.getName()
-  + "' wasn't completed in expectedTime:" + max + " ms", snapshot);
+  + "' wasn't completed in expectedTime:" + max + " ms", snapshotDesc);
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/bec81b19/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotDescription.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotDescription.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotDescription.java
index a455937..f737825 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotDescription.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SnapshotDescription.java
@@ -81,4 +81,12 @@ public class SnapshotDescription {
   public int getVersion() {
 return this.version;
   }
+
+  @Override
+  public String toString() {
+return "SnapshotDescription: name = " + ((name != null) ? name : null) + 
"/table = "
++ ((table != null) ? table : null) + " /owner = " + ((owner != null) ? 
owner : null)
++ (creationTime != -1 ? ("/creationtime = " + creationTime) : "")
++ (version != -1 ? ("/version = " + version) : "");
+  }
 }
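
For concreteness, a sketch of what the new toString() prints. The two-argument (name, table) constructor and the -1 defaults for creationTime/version are assumptions of this sketch (the -1 checks above suggest them), not something shown in the diff:

    // Hypothetical snapshot with only name and table set; owner stays null and the
    // creationtime/version parts are omitted because of the assumed -1 defaults.
    SnapshotDescription snap = new SnapshotDescription("snap1", "t1");
    System.out.println(snap);
    // Expected output, per the concatenation in toString():
    // SnapshotDescription: name = snap1/table = t1 /owner = null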

http://git-wip-us.apache.org/repos/asf/hbase/blob/bec81b19/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 9978c77..50a4920 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -3472,4 +3472,18 @@ public final class ProtobufUtil {
 HBaseProtos.SnapshotDescription snapshot = builder.build();
 return snapshot;
   }
+
+  /**
+   * Convert from
+   * {@link 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription} to
+   * {@link SnapshotDescription}
+   * @param snapshotDesc the protobuf SnapshotDescription
+   * @return the POJO SnapshotDescription
+   */
+  public static SnapshotDescription
+  createSnapshotDesc(HBaseProtos.SnapshotDescription snapshotDesc) {
+return new SnapshotDescription(snapshotDesc.getName(), 
snapshotDesc.getTable(),
+