hbase git commit: HBASE-15861 Add support for table sets in restore operation (Vlad)

2016-05-31 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/HBASE-7912 a402006de -> 486fc54c6


HBASE-15861 Add support for table sets in restore operation (Vlad)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/486fc54c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/486fc54c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/486fc54c

Branch: refs/heads/HBASE-7912
Commit: 486fc54c6c36eb488248c7e41bd5235f825c83dd
Parents: a402006
Author: tedyu 
Authored: Tue May 31 21:04:33 2016 -0700
Committer: tedyu 
Committed: Tue May 31 21:04:33 2016 -0700

--
 .../hadoop/hbase/backup/RestoreDriver.java  |  62 +++--
 .../hadoop/hbase/backup/TestFullBackupSet.java  |  22 +++-
 .../backup/TestFullBackupSetRestoreSet.java | 127 +++
 3 files changed, 198 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/486fc54c/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
index 83c8297..0dba079 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/backup/RestoreDriver.java
@@ -18,14 +18,20 @@
 package org.apache.hadoop.hbase.backup;
 
 import java.io.IOException;
+import java.util.List;
 
 import org.apache.commons.cli.CommandLine;
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.backup.impl.BackupRestoreConstants;
+import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
 import org.apache.hadoop.hbase.backup.util.BackupServerUtil;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.LogUtils;
 import org.apache.hadoop.util.ToolRunner;
@@ -40,9 +46,12 @@ public class RestoreDriver extends AbstractHBaseTool {
   private static final String OPTION_OVERWRITE = "overwrite";
   private static final String OPTION_CHECK = "check";
   private static final String OPTION_AUTOMATIC = "automatic";
+  private static final String OPTION_SET = "set";
+  private static final String OPTION_DEBUG = "debug";
+
 
   private static final String USAGE =
-  "Usage: hbase restore <backup_root_path> <backup_id> <tables> [tableMapping] \n"
+  "Usage: hbase restore [-set set_name] <backup_root_path> <backup_id> <tables> [tableMapping] \n"
   + "   [-overwrite] [-check] [-automatic]\n"
   + " backup_root_path  The parent location where the backup images are stored\n"
   + " backup_id The id identifying the backup image\n"
@@ -61,7 +70,8 @@ public class RestoreDriver extends AbstractHBaseTool {
   + "   The restore dependencies can be checked by using \"-check\" "
   + "option,\n"
   + "   or using \"hbase backup describe\" command. Without this option, "
-  + "only\n" + "   this backup image is restored\n";
+  + "only\n" + "   this backup image is restored\n"
+  + "   -set set_name   Backup set to restore, mutually exclusive with table list.";
 
 
   protected RestoreDriver() throws IOException
@@ -75,7 +85,8 @@ public class RestoreDriver extends AbstractHBaseTool {
 "Overwrite the data if any of the restore target tables exists");
 addOptNoArg(OPTION_CHECK, "Check restore sequence and dependencies");
 addOptNoArg(OPTION_AUTOMATIC, "Restore all dependencies");
-addOptNoArg("debug",  "Enable debug logging");
+addOptNoArg(OPTION_DEBUG,  "Enable debug logging");
+addOptWithArg(OPTION_SET, "Backup set name");
 
 // disable irrelevant loggers to avoid it mess up command output
 LogUtils.disableUselessLoggers(LOG);
@@ -85,7 +96,7 @@ public class RestoreDriver extends AbstractHBaseTool {
 
 // enable debug logging
 Logger backupClientLogger = Logger.getLogger("org.apache.hadoop.hbase.backup");
-if (cmd.hasOption("debug")) {
+if (cmd.hasOption(OPTION_DEBUG)) {
   backupClientLogger.setLevel(Level.DEBUG);
 }
 
@@ -112,16 +123,36 @@ public class RestoreDriver extends AbstractHBaseTool {
 
 // parse main restore command options
 String[] remainArgs = cmd.getArgs();
-if (remainArgs.length < 3) {
+if (remainArgs.length < 3 && 
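[The hunk is truncated in the archive; the rest of the patch relaxes the positional-argument check when a backup set is named. A minimal sketch of that kind of validation, assuming commons-cli parsing as used elsewhere in RestoreDriver — the exact argument counts are an assumption, not the committed code:]

import org.apache.commons.cli.CommandLine;

// Hedged sketch only: illustrates the -set / explicit-table-list mutual
// exclusion the patch introduces. Not the committed RestoreDriver code.
public class RestoreArgsSketch {
  private static final String OPTION_SET = "set";

  static boolean hasRequiredArgs(CommandLine cmd) {
    String[] remainArgs = cmd.getArgs();
    boolean useSet = cmd.hasOption(OPTION_SET);
    // Without -set, three positional arguments are required:
    //   backup_root_path backup_id tables
    // With -set, the table list is resolved from the named backup set, so only
    //   backup_root_path backup_id
    // remain mandatory, and an explicit table list should not be given.
    int required = useSet ? 2 : 3;
    return remainArgs.length >= required;
  }
}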

[5/9] hbase-site git commit: Published site at 015f2ef6292df52270df8845ccd244a97deb9c98.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/15c0d509/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
index b406aa2..0842516 100644
--- a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
+++ b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html

[Rendered-Javadoc diff omitted: the regenerated MasterStatusTmpl.ImplData page lists the optional Jamon template arguments (filter, servers, format, deadServers, serverManager, metaLocation, catalogJanitorEnabled, assignmentManager, frags) in a different order, so the corresponding fields, setters and getters are reshuffled. No hand-written code changed.]

[8/9] hbase-site git commit: Published site at 015f2ef6292df52270df8845ccd244a97deb9c98.

2016-05-31 Thread misty
Published site at 015f2ef6292df52270df8845ccd244a97deb9c98.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/15c0d509
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/15c0d509
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/15c0d509

Branch: refs/heads/asf-site
Commit: 15c0d50961ed6ae206d19ed4ea320903c61f9c7b
Parents: 7fb45f8
Author: jenkins 
Authored: Tue May 31 21:50:36 2016 +
Committer: Misty Stanley-Jones 
Committed: Tue May 31 15:07:06 2016 -0700

--
 acid-semantics.html | 6 +-
 apache_hbase_reference_guide.pdf| 35594 ++---
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 book.html   |   200 +-
 bulk-loads.html | 6 +-
 checkstyle-aggregate.html   | 6 +-
 coc.html| 6 +-
 cygwin.html | 6 +-
 dependencies.html   | 6 +-
 dependency-convergence.html | 6 +-
 dependency-info.html| 6 +-
 dependency-management.html  | 6 +-
 .../class-use/InterfaceStability.Unstable.html  |12 +-
 .../hbase/classification/package-tree.html  | 4 +-
 .../hadoop/hbase/client/package-tree.html   |12 +-
 .../hadoop/hbase/executor/package-tree.html | 2 +-
 .../hadoop/hbase/filter/package-tree.html   |10 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 6 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 2 +-
 .../hadoop/hbase/master/package-tree.html   | 4 +-
 .../hbase/master/procedure/package-tree.html| 4 +-
 .../org/apache/hadoop/hbase/package-tree.html   |12 +-
 .../hadoop/hbase/procedure2/package-tree.html   | 2 +-
 .../hadoop/hbase/quotas/package-tree.html   | 4 +-
 .../hadoop/hbase/regionserver/package-tree.html |24 +-
 .../hbase/security/access/package-tree.html | 2 +-
 .../hadoop/hbase/security/package-tree.html | 2 +-
 .../tmpl/master/MasterStatusTmpl.ImplData.html  |   240 +-
 .../hbase/tmpl/master/MasterStatusTmpl.html |96 +-
 .../hbase/tmpl/master/MasterStatusTmplImpl.html |48 +-
 .../regionserver/RSStatusTmpl.ImplData.html |60 +-
 .../hbase/tmpl/regionserver/RSStatusTmpl.html   |24 +-
 .../tmpl/regionserver/RSStatusTmplImpl.html |12 +-
 .../apache/hadoop/hbase/util/package-tree.html  | 8 +-
 .../tmpl/master/MasterStatusTmpl.ImplData.html  |   240 +-
 .../tmpl/master/MasterStatusTmpl.Intf.html  |   240 +-
 .../hbase/tmpl/master/MasterStatusTmpl.html |   240 +-
 .../hbase/tmpl/master/MasterStatusTmplImpl.html |68 +-
 .../regionserver/RSStatusTmpl.ImplData.html |60 +-
 .../tmpl/regionserver/RSStatusTmpl.Intf.html|60 +-
 .../hbase/tmpl/regionserver/RSStatusTmpl.html   |60 +-
 .../tmpl/regionserver/RSStatusTmplImpl.html |20 +-
 distribution-management.html| 6 +-
 export_control.html | 6 +-
 index.html  | 6 +-
 integration.html| 6 +-
 issue-tracking.html | 6 +-
 license.html| 6 +-
 mail-lists.html | 6 +-
 metrics.html| 6 +-
 modules.html| 6 +-
 old_news.html   | 6 +-
 plugin-management.html  | 6 +-
 plugins.html| 6 +-
 poweredbyhbase.html | 6 +-
 project-info.html   | 6 +-
 project-reports.html| 6 +-
 project-summary.html| 6 +-
 pseudo-distributed.html | 6 +-
 replication.html| 6 +-
 resources.html  | 6 +-
 source-repository.html  | 6 +-
 sponsors.html   | 6 +-
 supportingprojects.html | 6 +-
 team-list.html  | 6 +-
 .../org/apache/hadoop/hbase/package-tree.html   |12 +-
 .../hadoop/hbase/procedure2/package-tree.html   | 2 +-
 .../hadoop/hbase/regionserver/package-tree.html | 2 +-
 .../apache/hadoop/hbase/test/package-tree.html  | 2 +-
 .../apache/hadoop/hbase/wal/package-tree.html   | 2 +-
 .../hbase/tmpl/common/TaskMonitorTmpl.html  |90 +-
 

[1/9] hbase-site git commit: Published site at 015f2ef6292df52270df8845ccd244a97deb9c98.

2016-05-31 Thread misty
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 7fb45f8e4 -> 782444c78


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/15c0d509/xref/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
--
diff --git a/xref/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html b/xref/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
index 271752b..7b31a91 100644
--- a/xref/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
+++ b/xref/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html

[Cross-referenced source diff omitted: the regenerated xref page reorders the Jamon @Argument annotations (serverManager, format, metaLocation, catalogJanitorEnabled, filter, assignmentManager, servers, deadServers, frags) and the generated setter/getter pairs of MasterStatusTmpl accordingly.]


[3/9] hbase-site git commit: Published site at 015f2ef6292df52270df8845ccd244a97deb9c98.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/15c0d509/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
index 90efc25..4246d70 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html

[Generated src-html diff omitted: the same Jamon argument reordering, reflected in the HTML rendering of the generated MasterStatusTmpl source.]

[9/9] hbase-site git commit: Empty commit

2016-05-31 Thread misty
Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/782444c7
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/782444c7
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/782444c7

Branch: refs/heads/asf-site
Commit: 782444c78c974647df0399f61ddc4c509644a897
Parents: 15c0d50
Author: Misty Stanley-Jones 
Authored: Tue May 31 15:08:11 2016 -0700
Committer: Misty Stanley-Jones 
Committed: Tue May 31 15:08:11 2016 -0700

--

--




[7/9] hbase-site git commit: Published site at 015f2ef6292df52270df8845ccd244a97deb9c98.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/15c0d509/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 4ec93b9..54dbb67 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,24 +5,24 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
 /Producer (Apache HBase Team)
-/CreationDate (D:20160531144155+00'00')
-/ModDate (D:20160531144155+00'00')
+/CreationDate (D:20160531213923+00'00')
+/ModDate (D:20160531213923+00'00')
 >>
 endobj

[Remainder of the regenerated-PDF diff omitted: apache_hbase_reference_guide.pdf is rebuilt, updating the /Outlines, /PageLabels and page-tree (/Kids) object references.]


[4/9] hbase-site git commit: Published site at 015f2ef6292df52270df8845ccd244a97deb9c98.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/15c0d509/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
index 90efc25..4246d70 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html

[Generated src-html diff omitted: the HTML rendering of MasterStatusTmpl.ImplData picks up the same Jamon argument reordering shown in the other MasterStatusTmpl pages above.]

hbase git commit: HBASE-15923 Shell rows counter test fails

2016-05-31 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 73ec33856 -> 015f2ef62


HBASE-15923 Shell rows counter test fails


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/015f2ef6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/015f2ef6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/015f2ef6

Branch: refs/heads/master
Commit: 015f2ef6292df52270df8845ccd244a97deb9c98
Parents: 73ec338
Author: tedyu 
Authored: Tue May 31 14:21:32 2016 -0700
Committer: tedyu 
Committed: Tue May 31 14:21:32 2016 -0700

--
 hbase-shell/src/test/ruby/hbase/table_test.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/015f2ef6/hbase-shell/src/test/ruby/hbase/table_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/table_test.rb 
b/hbase-shell/src/test/ruby/hbase/table_test.rb
index d4547b7..a617bc5 100644
--- a/hbase-shell/src/test/ruby/hbase/table_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/table_test.rb
@@ -561,7 +561,7 @@ module Hbase
 define_test "scan with a block should yield rows and return rows counter" 
do
   rows = {}
   res = @test_table._scan_internal { |row, cells| rows[row] = cells }
-  assert_equal(rows.keys.size, res)
+  assert_equal([rows.keys.size,false], res)
 end
 
 define_test "scan should support COLUMNS with value CONVERTER information" 
do



hbase git commit: HBASE-15923 Shell rows counter test fails

2016-05-31 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 536a8e836 -> 5824f2236


HBASE-15923 Shell rows counter test fails


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5824f223
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5824f223
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5824f223

Branch: refs/heads/branch-1.3
Commit: 5824f2236b8b59882113c523d689b734b3ff4996
Parents: 536a8e8
Author: tedyu 
Authored: Tue May 31 14:20:29 2016 -0700
Committer: tedyu 
Committed: Tue May 31 14:20:29 2016 -0700

--
 hbase-shell/src/test/ruby/hbase/table_test.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5824f223/hbase-shell/src/test/ruby/hbase/table_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/table_test.rb 
b/hbase-shell/src/test/ruby/hbase/table_test.rb
index d74c6d8..0fb5a14 100644
--- a/hbase-shell/src/test/ruby/hbase/table_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/table_test.rb
@@ -561,7 +561,7 @@ module Hbase
 define_test "scan with a block should yield rows and return rows counter" 
do
   rows = {}
   res = @test_table._scan_internal { |row, cells| rows[row] = cells }
-  assert_equal(rows.keys.size, res)
+  assert_equal([rows.keys.size,false], res)
 end
 
 define_test "scan should support COLUMNS with value CONVERTER information" 
do



hbase git commit: HBASE-15923 Shell rows counter test fails

2016-05-31 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 32258c2b3 -> 0cedd8b34


HBASE-15923 Shell rows counter test fails


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0cedd8b3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0cedd8b3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0cedd8b3

Branch: refs/heads/branch-1
Commit: 0cedd8b344acc54534630a65ce7ecb9de119b2b0
Parents: 32258c2
Author: tedyu 
Authored: Tue May 31 14:19:42 2016 -0700
Committer: tedyu 
Committed: Tue May 31 14:19:42 2016 -0700

--
 hbase-shell/src/test/ruby/hbase/table_test.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0cedd8b3/hbase-shell/src/test/ruby/hbase/table_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/table_test.rb 
b/hbase-shell/src/test/ruby/hbase/table_test.rb
index d74c6d8..0fb5a14 100644
--- a/hbase-shell/src/test/ruby/hbase/table_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/table_test.rb
@@ -561,7 +561,7 @@ module Hbase
 define_test "scan with a block should yield rows and return rows counter" 
do
   rows = {}
   res = @test_table._scan_internal { |row, cells| rows[row] = cells }
-  assert_equal(rows.keys.size, res)
+  assert_equal([rows.keys.size,false], res)
 end
 
 define_test "scan should support COLUMNS with value CONVERTER information" 
do



hbase git commit: HBASE-15907 updates for HBase Shell pre-splitting docs

2016-05-31 Thread misty
Repository: hbase
Updated Branches:
  refs/heads/master eb64cd9dd -> 73ec33856


HBASE-15907 updates for HBase Shell pre-splitting docs

(cherry picked from commit 01adec574d9ccbdd6183466cb8ee6b43935d69ca)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/73ec3385
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/73ec3385
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/73ec3385

Branch: refs/heads/master
Commit: 73ec33856d0ee2ac1e058c6f7e1ccffa4476fbc0
Parents: eb64cd9
Author: Ronan Stokes 
Authored: Mon May 30 23:52:43 2016 -0700
Committer: Misty Stanley-Jones 
Committed: Tue May 31 13:52:46 2016 -0700

--
 src/main/asciidoc/_chapters/performance.adoc | 19 ++-
 src/main/asciidoc/_chapters/shell.adoc   | 62 +++
 2 files changed, 79 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/73ec3385/src/main/asciidoc/_chapters/performance.adoc
--
diff --git a/src/main/asciidoc/_chapters/performance.adoc 
b/src/main/asciidoc/_chapters/performance.adoc
index a0c00ae..5f27640 100644
--- a/src/main/asciidoc/_chapters/performance.adoc
+++ b/src/main/asciidoc/_chapters/performance.adoc
@@ -499,7 +499,7 @@ For bulk imports, this means that all clients will write to 
the same region unti
 A useful pattern to speed up the bulk import process is to pre-create empty 
regions.
 Be somewhat conservative in this, because too-many regions can actually 
degrade performance.
 
-There are two different approaches to pre-creating splits.
+There are two different approaches to pre-creating splits using the HBase API.
 The first approach is to rely on the default `Admin` strategy (which is 
implemented in `Bytes.split`)...
 
 [source,java]
@@ -511,7 +511,7 @@ int numberOfRegions = ...;  // # of regions to create
 admin.createTable(table, startKey, endKey, numberOfRegions);
 
 
-And the other approach is to define the splits yourself...
+And the other approach, using the HBase API, is to define the splits 
yourself...
 
 [source,java]
 
@@ -519,8 +519,23 @@ byte[][] splits = ...;   // create your own splits
 admin.createTable(table, splits);
 
 
+You can achieve a similar effect using the HBase Shell to create tables by specifying split options.
+
+[source]
+----
+# create table with specific split points
+hbase>create 't1','f1',SPLITS => ['\x10\x00', '\x20\x00', '\x30\x00', '\x40\x00']
+
+# create table with four regions based on random bytes keys
+hbase>create 't2','f1', { NUMREGIONS => 4 , SPLITALGO => 'UniformSplit' }
+
+# create table with five regions based on hex keys
+hbase>create 't3','f1', { NUMREGIONS => 5, SPLITALGO => 'HexStringSplit' }
+----
+
 See <> for issues related to understanding your keyspace and pre-creating regions.
 See <> for discussion on manually pre-splitting regions.
+See <> for more details of using the HBase Shell to pre-split tables.
 
 [[def.log.flush]]
 ===  Table Creation: Deferred Log Flush

http://git-wip-us.apache.org/repos/asf/hbase/blob/73ec3385/src/main/asciidoc/_chapters/shell.adoc
--
diff --git a/src/main/asciidoc/_chapters/shell.adoc 
b/src/main/asciidoc/_chapters/shell.adoc
index a4237fd..8f1f59b 100644
--- a/src/main/asciidoc/_chapters/shell.adoc
+++ b/src/main/asciidoc/_chapters/shell.adoc
@@ -352,6 +352,68 @@ hbase(main):022:0> Date.new(1218920189000).toString() => 
"Sat Aug 16 20:56:29 UT
 
 To output in a format that is exactly like that of the HBase log format will 
take a little messing with 
link:http://download.oracle.com/javase/6/docs/api/java/text/SimpleDateFormat.html[SimpleDateFormat].
 
+[[tricks.pre-split]]
+=== Pre-splitting tables with the HBase Shell
+You can use a variety of options to pre-split tables when creating them via the HBase Shell `create` command.
+
+The simplest approach is to specify an array of split points when creating the table. Note that when specifying string literals as split points, these will create split points based on the underlying byte representation of the string. So when specifying a split point of '10', we are actually specifying the byte split point '\x31\x30'.
+
+The split points will define `n+1` regions where `n` is the number of split points. The lowest region will contain all keys from the lowest possible key up to but not including the first split point key.
+The next region will contain keys from the first split point up to, but not including the next split point key.
+This will continue for all split points up to the last. The last region will be defined from the last split point up to 
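[For reference alongside the shell examples quoted above, a self-contained sketch of the equivalent pre-split from the Java client API; the table name, family name and split keys are illustrative, not part of the patch:]

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class PreSplitExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Admin admin = conn.getAdmin()) {
      HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("t1"));
      desc.addFamily(new HColumnDescriptor("f1"));
      // Four explicit split points yield five regions, like the shell SPLITS example.
      byte[][] splits = {
          new byte[] {0x10, 0x00},
          new byte[] {0x20, 0x00},
          new byte[] {0x30, 0x00},
          new byte[] {0x40, 0x00}
      };
      admin.createTable(desc, splits);
    }
  }
}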

hbase git commit: HBASE-15884 NPE in StoreFileScanner#skipKVsNewerThanReadpoint during reverse scan (Sergey Soldatov)

2016-05-31 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/0.98 60dbf6769 -> 67dc17240


HBASE-15884 NPE in StoreFileScanner#skipKVsNewerThanReadpoint during reverse 
scan (Sergey Soldatov)

Conflicts:

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/67dc1724
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/67dc1724
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/67dc1724

Branch: refs/heads/0.98
Commit: 67dc17240edffdb048d63ee13e12bbcac9d51bfc
Parents: 60dbf67
Author: tedyu 
Authored: Wed May 25 17:29:32 2016 -0700
Committer: Andrew Purtell 
Committed: Tue May 31 13:47:06 2016 -0700

--
 .../apache/hadoop/hbase/regionserver/StoreFileScanner.java   | 8 
 1 file changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/67dc1724/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
index 5330ffb..0a0fed7 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileScanner.java
@@ -223,11 +223,11 @@ public class StoreFileScanner implements KeyValueScanner {
 while(enforceMVCC
 && cur != null
 && (cur.getMvccVersion() > readPt)) {
-  hfs.next();
+  boolean hasNext = hfs.next();
   cur = hfs.getKeyValue();
-  if (this.stopSkippingKVsIfNextRow
-  && getComparator().compareRows(cur.getBuffer(), cur.getRowOffset(),
-  cur.getRowLength(), startKV.getBuffer(), startKV.getRowOffset(),
+  if (hasNext && this.stopSkippingKVsIfNextRow
+  && getComparator().compareRows(cur.getRowArray(), cur.getRowOffset(),
+  cur.getRowLength(), startKV.getRowArray(), 
startKV.getRowOffset(),
   startKV.getRowLength()) > 0) {
 return false;
   }
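[The crux of the fix above is that the boolean returned by next() now guards the row comparison, so the reverse scan stops cleanly instead of dereferencing a cell that is null once the scanner runs off the end of the file. A small self-contained illustration of that guard pattern — plain Java stand-ins, not the HBase scanner types:]

import java.util.Iterator;
import java.util.function.Predicate;

// Hedged illustration of the guard added in the patch; Iterator/Predicate are
// simplified stand-ins for the HFile scanner and the row comparison.
class SkipNewerGuardSketch {
  static <T> T skipWhile(Iterator<T> it, T cur, Predicate<T> tooNew, Predicate<T> nextRow) {
    while (cur != null && tooNew.test(cur)) {
      boolean hasNext = it.hasNext();      // remember whether there really is a next cell
      cur = hasNext ? it.next() : null;    // cur may legitimately become null here
      if (hasNext && nextRow.test(cur)) {  // only inspect cur when it is valid
        return null;                       // crossed into the next row: stop early
      }
    }
    return cur;
  }
}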



[1/2] hbase git commit: HBASE-15873 ACL for snapshot restore / clone is not enforced

2016-05-31 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/0.98 45bf0c64f -> 60dbf6769


HBASE-15873 ACL for snapshot restore / clone is not enforced


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7be7b8ad
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7be7b8ad
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7be7b8ad

Branch: refs/heads/0.98
Commit: 7be7b8ad9622a2297dcd00a5f580d748e08f33e0
Parents: 45bf0c6
Author: tedyu 
Authored: Sun May 22 19:13:13 2016 -0700
Committer: Andrew Purtell 
Committed: Tue May 31 11:59:40 2016 -0700

--
 .../apache/hadoop/hbase/master/snapshot/SnapshotManager.java | 8 
 1 file changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7be7b8ad/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
index 621ea8a..ead27d2 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotManager.java
@@ -704,24 +704,24 @@ public class SnapshotManager extends 
MasterProcedureManager implements Stoppable
 
   // call coproc pre hook
   if (cpHost != null) {
-cpHost.preRestoreSnapshot(reqSnapshot, snapshotTableDesc);
+cpHost.preRestoreSnapshot(fsSnapshot, snapshotTableDesc);
   }
   restoreSnapshot(fsSnapshot, snapshotTableDesc);
   LOG.info("Restore snapshot=" + fsSnapshot.getName() + " as table=" + 
tableName);
 
   if (cpHost != null) {
-cpHost.postRestoreSnapshot(reqSnapshot, snapshotTableDesc);
+cpHost.postRestoreSnapshot(fsSnapshot, snapshotTableDesc);
   }
 } else {
   HTableDescriptor htd = 
RestoreSnapshotHelper.cloneTableSchema(snapshotTableDesc, tableName);
   if (cpHost != null) {
-cpHost.preCloneSnapshot(reqSnapshot, htd);
+cpHost.preCloneSnapshot(fsSnapshot, htd);
   }
   cloneSnapshot(fsSnapshot, htd);
   LOG.info("Clone snapshot=" + fsSnapshot.getName() + " as table=" + 
tableName);
 
   if (cpHost != null) {
-cpHost.postCloneSnapshot(reqSnapshot, htd);
+cpHost.postCloneSnapshot(fsSnapshot, htd);
   }
 }
   }



hbase git commit: HBASE-15822 Move to the latest docker base image

2016-05-31 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 5b10031a1 -> 3fe62fe16


HBASE-15822 Move to the latest docker base image


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3fe62fe1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3fe62fe1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3fe62fe1

Branch: refs/heads/HBASE-14850
Commit: 3fe62fe16d3292b73449775a6d6e4f89b9333b47
Parents: 5b10031
Author: Elliott Clark 
Authored: Tue May 31 10:42:45 2016 -0700
Committer: Elliott Clark 
Committed: Tue May 31 10:42:45 2016 -0700

--
 hbase-native-client/Dockerfile   | 2 +-
 hbase-native-client/if/RPC.proto | 1 +
 hbase-native-client/if/WAL.proto | 5 +++--
 3 files changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3fe62fe1/hbase-native-client/Dockerfile
--
diff --git a/hbase-native-client/Dockerfile b/hbase-native-client/Dockerfile
index be0cbc6..ca87523 100644
--- a/hbase-native-client/Dockerfile
+++ b/hbase-native-client/Dockerfile
@@ -15,7 +15,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-FROM pjameson/buck-folly-watchman:20160425
+FROM pjameson/buck-folly-watchman:20160511
 
 ARG CC=/usr/bin/gcc-5
 ARG CXX=/usr/bin/g++-5

http://git-wip-us.apache.org/repos/asf/hbase/blob/3fe62fe1/hbase-native-client/if/RPC.proto
--
diff --git a/hbase-native-client/if/RPC.proto b/hbase-native-client/if/RPC.proto
index 59bb03d..8413d25 100644
--- a/hbase-native-client/if/RPC.proto
+++ b/hbase-native-client/if/RPC.proto
@@ -125,6 +125,7 @@ message RequestHeader {
   // 0 is NORMAL priority.  200 is HIGH.  If no priority, treat it as NORMAL.
   // See HConstants.
   optional uint32 priority = 6;
+  optional uint32 timeout = 7;
 }
 
 message ResponseHeader {

http://git-wip-us.apache.org/repos/asf/hbase/blob/3fe62fe1/hbase-native-client/if/WAL.proto
--
diff --git a/hbase-native-client/if/WAL.proto b/hbase-native-client/if/WAL.proto
index cb9bd8f..2061b22 100644
--- a/hbase-native-client/if/WAL.proto
+++ b/hbase-native-client/if/WAL.proto
@@ -44,9 +44,9 @@ message WALKey {
   required uint64 log_sequence_number = 3;
   required uint64 write_time = 4;
   /*
-  This parameter is deprecated in favor of clusters which 
+  This parameter is deprecated in favor of clusters which
   contains the list of clusters that have consumed the change.
-  It is retained so that the log created by earlier releases (0.94) 
+  It is retained so that the log created by earlier releases (0.94)
   can be read by the newer releases.
   */
   optional UUID cluster_id = 5 [deprecated=true];
@@ -132,6 +132,7 @@ message StoreDescriptor {
   required bytes family_name = 1;
   required string store_home_dir = 2; //relative to region dir
   repeated string store_file = 3; // relative to store dir
+  optional uint64 store_file_size_bytes = 4; // size of store file
 }
 
 /**



[30/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.html b/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.html
index 0e9ca1e..f20dd6d 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.html

[Generated src-html diff omitted: a renumbered HTML rendering of KeyStoreKeyProvider, a KeyProvider that resolves keys from a protected JKS/JCEKS keystore on the local filesystem. Per its class comment, it is configured with a URI passed to init(), e.g. jceks:///var/tmp/example.ks?password=foobar, or with a passwordFile query option naming a properties file of per-key passwords.]
[04/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/client/ClusterConnection.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/ClusterConnection.html 
b/devapidocs/org/apache/hadoop/hbase/client/ClusterConnection.html
index b1afd64..bb8583c 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/ClusterConnection.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/ClusterConnection.html
@@ -61,13 +61,13 @@
 
 Summary:
 Nested|
-Field|
+Field|
 Constr|
 Method
 
 
 Detail:
-Field|
+Field|
 Constr|
 Method
 
@@ -87,7 +87,7 @@
 
 
 All Superinterfaces:
-Abortable, http://docs.oracle.com/javase/7/docs/api/java/lang/AutoCloseable.html?is-external=true;
 title="class or interface in java.lang">AutoCloseable, http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true;
 title="class or interface in java.io">Closeable, Connection, HConnection
+Abortable, http://docs.oracle.com/javase/7/docs/api/java/lang/AutoCloseable.html?is-external=true;
 title="class or interface in java.lang">AutoCloseable, http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true;
 title="class or interface in java.io">Closeable, Connection
 
 
 All Known Implementing Classes:
@@ -97,7 +97,7 @@
 
 @InterfaceAudience.Private
 public interface ClusterConnection
-extends HConnection
+extends Connection
 Internal methods on Connection that should not be used by 
user code.
 
 
@@ -111,13 +111,20 @@ extends 
 
 Field Summary
-
-
-
-
-Fields inherited from interfaceorg.apache.hadoop.hbase.client.HConnection
-HBASE_CLIENT_CONNECTION_IMPL
-
+
+Fields
+
+Modifier and Type
+Field and Description
+
+
+static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+HBASE_CLIENT_CONNECTION_IMPL
+Key for configuration in Configuration whose value is the 
class we implement making a
+ new Connection instance.
+
+
+
 
 
 
@@ -192,6 +199,10 @@ extends getConnectionMetrics()
 
 
+int
+getCurrentNrHRS()
+
+
 MasterKeepAliveConnection
 getKeepAliveMasterService()
 Deprecated.
@@ -199,23 +210,23 @@ extends 
 
 
-
+
 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService.BlockingInterface
 getMaster()
 Returns a MasterKeepAliveConnection to 
the active master
 
 
-
+
 RpcRetryingCallerFactory
 getNewRpcRetryingCallerFactory(org.apache.hadoop.conf.Configurationconf)
 Returns a new RpcRetryingCallerFactory from the given 
Configuration.
 
 
-
+
 NonceGenerator
 getNonceGenerator()
 
-
+
 HRegionLocation
 getRegionLocation(TableNametableName,
   byte[]row,
@@ -223,18 +234,24 @@ extends Find region location hosting passed row
 
 
-
+
 RpcControllerFactory
 getRpcControllerFactory()
 
-
+
 RpcRetryingCallerFactory
 getRpcRetryingCallerFactory()
 
-
+
 ServerStatisticTracker
 getStatisticsTracker()
 
+
+TableState
+getTableState(TableNametableName)
+Retrieve TableState, represent current table state.
+
+
 
 boolean
 hasCellBlockSupport()
@@ -243,7 +260,7 @@ extends boolean
 isDeadServer(ServerNameserverName)
 Deprecated.
-internal method, do not use thru HConnection
+internal method, do not use thru 
ClusterConnection
 
 
 
@@ -264,6 +281,17 @@ extends 
 
 
+boolean
+isTableDisabled(TableNametableName)
+
+
+boolean
+isTableEnabled(TableNametableName)
+A table that isTableEnabled == false and isTableDisabled == 
false
+ is possible.
+
+
+
 HRegionLocation
 locateRegion(byte[]regionName)
 Gets the location of the region of regionName.
@@ -335,18 +363,11 @@ extends 
 
 
-
-
-
-Methods inherited from interfaceorg.apache.hadoop.hbase.client.HConnection
-clearRegionCache,
 getAdmin,
 getAdmin,
 getConfiguration,
 getCurrentNrHRS,
 getHTableDescriptor,
 getHTableDescriptor,
 getHTableDescri
 ptors, getHTableDescriptorsByTableName,
 getRegionCachePrefetch,
 getRegionCachePrefetch,
 getRegionLocation,
 getRegionLocator,
 getTable,
 getTable,
  getTable,
 getTable,
 getTable,
 getTable,
 getTableNames,
 getTableState,
 isClosed,
 isTableAvailable, isTableAvailable,
 isTableAvailable,
 isTableDisabled,
 isTableDisabled,
 isTableEnabled,
 isTableEnabled,
 listTableNames,
 listTables, locateRegion,
 locateRegions,
 locateRegions,
 processBatch,
 processBatch,
 processBatchCallback,
 processBatchCallback,
 relocateRegion,
 setRegionCachePrefetch,
 setRegionCachePrefetch,
 updateCachedLocations, updateCachedLocations
-
-
 
 
 
 Methods inherited from interfaceorg.apache.hadoop.hbase.client.Connection
-close,
 getBufferedMutator,
 getBufferedMutator
+close,
 getAdmin,
 getBufferedMutator,
 getBufferedMutator,
 getConfiguration,
 getRegionLocator,
 getTable,
 getTable, isClosed
 
 
 
@@ -363,6 +384,26 @@ extends 
 
 
+
+
+
+
+
+Field Detail
+
+
+
+
+
+HBASE_CLIENT_CONNECTION_IMPL
+static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface 
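
The net effect of this diff is that ClusterConnection now sits directly on Connection and absorbs the members that previously came from HConnection (HBASE_CLIENT_CONNECTION_IMPL, getCurrentNrHRS(), getTableState(), isTableEnabled()/isTableDisabled()), while ClusterConnection itself stays @InterfaceAudience.Private. Client code is expected to go through ConnectionFactory and the public Connection interface; a minimal sketch, with a placeholder table name:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.Table;

    public class ConnectionExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // A Connection is heavyweight and thread-safe; create one and share it.
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("my_table"))) {
          // issue Gets/Puts/Scans against the table here
        }
      }
    }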

[06/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 0f206c7..4475e7c 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -759,39 +759,39 @@ service.
 
 
 private TableName
-BufferedMutatorParams.tableName
+HRegionLocator.tableName
 
 
-private TableName
-TableState.tableName
+protected TableName
+RpcRetryingCallerWithReadReplicas.tableName
 
 
-protected TableName
-AbstractRegionServerCallable.tableName
+private TableName
+BufferedMutatorImpl.tableName
 
 
 private TableName
-ClientScanner.tableName
+TableState.tableName
 
 
 private TableName
-HTable.tableName
+BufferedMutatorParams.tableName
 
 
 protected TableName
-RpcRetryingCallerWithReadReplicas.tableName
+RegionAdminServiceCallable.tableName
 
 
 private TableName
-HRegionLocator.tableName
+AsyncProcess.AsyncRequestFutureImpl.tableName
 
 
 private TableName
-HBaseAdmin.TableFuture.tableName
+HTable.tableName
 
 
 private TableName
-AsyncProcess.AsyncRequestFutureImpl.tableName
+ClientScanner.tableName
 
 
 private TableName
@@ -799,11 +799,11 @@ service.
 
 
 private TableName
-BufferedMutatorImpl.tableName
+HBaseAdmin.TableFuture.tableName
 
 
 protected TableName
-RegionAdminServiceCallable.tableName
+AbstractRegionServerCallable.tableName
 
 
 
@@ -837,6 +837,14 @@ service.
 
 
 TableName
+HRegionLocator.getName()
+
+
+TableName
+BufferedMutatorImpl.getName()
+
+
+TableName
 BufferedMutator.getName()
 Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
 
@@ -853,18 +861,10 @@ service.
 
 
 TableName
-HRegionLocator.getName()
-
-
-TableName
 RegionLocator.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
-
-TableName
-BufferedMutatorImpl.getName()
-
 
 TableName
 HTableWrapper.getName()
@@ -875,22 +875,22 @@ service.
 
 
 TableName
-BufferedMutatorParams.getTableName()
-
-
-TableName
 TableState.getTableName()
 Table name for state
 
 
-
+
 TableName
-AbstractRegionServerCallable.getTableName()
+BufferedMutatorParams.getTableName()
 
-
+
 protected TableName
 HBaseAdmin.TableFuture.getTableName()
 
+
+TableName
+AbstractRegionServerCallable.getTableName()
+
 
 private TableName
 HBaseAdmin.getTableNameBeforeRestoreSnapshot(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringsnapshotName)
@@ -903,24 +903,8 @@ service.
 
 
 TableName[]
-HConnection.listTableNames()
-Deprecated.
-Use Admin.listTables()
 instead.
-
-
-
-
-TableName[]
 HBaseAdmin.listTableNames()
 
-
-TableName[]
-ConnectionImplementation.listTableNames()
-Deprecated.
-Use Admin.listTableNames()
 instead
-
-
-
 
 TableName[]
 Admin.listTableNames(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
@@ -1020,34 +1004,34 @@ service.
 
 
 void
-MetaCache.cacheLocation(TableNametableName,
-  RegionLocationslocations)
-Put a newly discovered HRegionLocation into the cache.
-
-
-
-void
 ConnectionImplementation.cacheLocation(TableNametableName,
   RegionLocationslocation)
 Put a newly discovered HRegionLocation into the cache.
 
 
-
+
 void
 ClusterConnection.cacheLocation(TableNametableName,
   RegionLocationslocation)
 
-
+
 void
-MetaCache.cacheLocation(TableNametableName,
+MetaCache.cacheLocation(TableNametableName,
+  RegionLocationslocations)
+Put a newly discovered HRegionLocation into the cache.
+
+
+
+private void
+ConnectionImplementation.cacheLocation(TableNametableName,
   ServerNamesource,
   HRegionLocationlocation)
 Put a newly discovered HRegionLocation into the cache.
 
 
 
-private void
-ConnectionImplementation.cacheLocation(TableNametableName,
+void
+MetaCache.cacheLocation(TableNametableName,
   ServerNamesource,
   HRegionLocationlocation)
 Put a newly discovered HRegionLocation into the cache.
@@ -1082,53 +1066,45 @@ service.
 
 
 void
-HConnection.clearRegionCache(TableNametableName)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-void
 ConnectionImplementation.clearRegionCache(TableNametableName)
 
-
+
 void
 ClusterConnection.clearRegionCache(TableNametableName)
 Allows flushing the region cache of all locations that 
pertain to
  tableName
 
 
-
+
 void
 ConnectionImplementation.clearRegionCache(TableNametableName,
 byte[]row)
 
-
+
 void
 Admin.cloneSnapshot(byte[]snapshotName,
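
Several of the removed rows above are HConnection.listTableNames() and ConnectionImplementation.listTableNames(), whose deprecation notes already pointed at Admin. A short sketch of the Admin-based replacement, assuming an already-created Connection named connection:

    // List tables through Admin rather than the removed HConnection methods.
    try (Admin admin = connection.getAdmin()) {
      for (TableName name : admin.listTableNames()) {
        System.out.println(name.getNameAsString());
      }
    }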
   

[29/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
index fff3fe9..654bc34 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/Import.html
@@ -36,757 +36,758 @@
 028import java.util.ArrayList;
 029import java.util.Collections;
 030import java.util.List;
-031import java.util.Map;
-032import java.util.TreeMap;
-033import java.util.UUID;
-034
-035import org.apache.commons.logging.Log;
-036import 
org.apache.commons.logging.LogFactory;
-037import 
org.apache.hadoop.conf.Configuration;
-038import 
org.apache.hadoop.conf.Configured;
-039import org.apache.hadoop.fs.FileSystem;
-040import org.apache.hadoop.fs.Path;
-041import org.apache.hadoop.hbase.Cell;
-042import 
org.apache.hadoop.hbase.CellComparator;
-043import 
org.apache.hadoop.hbase.CellUtil;
-044import 
org.apache.hadoop.hbase.HBaseConfiguration;
-045import 
org.apache.hadoop.hbase.KeyValue;
-046import 
org.apache.hadoop.hbase.KeyValueUtil;
-047import 
org.apache.hadoop.hbase.TableName;
-048import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-049import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-050import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-051import 
org.apache.hadoop.hbase.client.Admin;
-052import 
org.apache.hadoop.hbase.client.Connection;
-053import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-054import 
org.apache.hadoop.hbase.client.Delete;
-055import 
org.apache.hadoop.hbase.client.Durability;
-056import 
org.apache.hadoop.hbase.client.Mutation;
-057import 
org.apache.hadoop.hbase.client.Put;
-058import 
org.apache.hadoop.hbase.client.RegionLocator;
-059import 
org.apache.hadoop.hbase.client.Result;
-060import 
org.apache.hadoop.hbase.client.Table;
-061import 
org.apache.hadoop.hbase.filter.Filter;
-062import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-063import 
org.apache.hadoop.hbase.util.Bytes;
-064import 
org.apache.hadoop.hbase.zookeeper.ZKClusterId;
-065import 
org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-066import 
org.apache.hadoop.io.RawComparator;
-067import 
org.apache.hadoop.io.WritableComparable;
-068import 
org.apache.hadoop.io.WritableComparator;
-069import org.apache.hadoop.mapreduce.Job;
-070import 
org.apache.hadoop.mapreduce.Partitioner;
-071import 
org.apache.hadoop.mapreduce.Reducer;
-072import 
org.apache.hadoop.mapreduce.TaskCounter;
-073import 
org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-074import 
org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
-075import 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-076import 
org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
-077import org.apache.hadoop.util.Tool;
-078import 
org.apache.hadoop.util.ToolRunner;
-079import 
org.apache.zookeeper.KeeperException;
-080
+031import java.util.Locale;
+032import java.util.Map;
+033import java.util.TreeMap;
+034import java.util.UUID;
+035
+036import org.apache.commons.logging.Log;
+037import 
org.apache.commons.logging.LogFactory;
+038import 
org.apache.hadoop.conf.Configuration;
+039import 
org.apache.hadoop.conf.Configured;
+040import org.apache.hadoop.fs.FileSystem;
+041import org.apache.hadoop.fs.Path;
+042import org.apache.hadoop.hbase.Cell;
+043import 
org.apache.hadoop.hbase.CellComparator;
+044import 
org.apache.hadoop.hbase.CellUtil;
+045import 
org.apache.hadoop.hbase.HBaseConfiguration;
+046import 
org.apache.hadoop.hbase.KeyValue;
+047import 
org.apache.hadoop.hbase.KeyValueUtil;
+048import 
org.apache.hadoop.hbase.TableName;
+049import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
+050import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+051import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+052import 
org.apache.hadoop.hbase.client.Admin;
+053import 
org.apache.hadoop.hbase.client.Connection;
+054import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+055import 
org.apache.hadoop.hbase.client.Delete;
+056import 
org.apache.hadoop.hbase.client.Durability;
+057import 
org.apache.hadoop.hbase.client.Mutation;
+058import 
org.apache.hadoop.hbase.client.Put;
+059import 
org.apache.hadoop.hbase.client.RegionLocator;
+060import 
org.apache.hadoop.hbase.client.Result;
+061import 
org.apache.hadoop.hbase.client.Table;
+062import 
org.apache.hadoop.hbase.filter.Filter;
+063import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+064import 
org.apache.hadoop.hbase.util.Bytes;
+065import 
org.apache.hadoop.hbase.zookeeper.ZKClusterId;
+066import 
org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+067import 
org.apache.hadoop.io.RawComparator;
+068import 
org.apache.hadoop.io.WritableComparable;
+069import 

[14/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
index d037968..3df7b03 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HDFSBlocksDistribution.html
@@ -262,11 +262,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HDFSBlocksDistribution
-StoreFileInfo.getHDFSBlockDistribution()
+StoreFile.getHDFSBlockDistribution()
 
 
 HDFSBlocksDistribution
-StoreFile.getHDFSBlockDistribution()
+StoreFileInfo.getHDFSBlockDistribution()
 
 
 HDFSBlocksDistribution



[09/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
index 86f6dd8..235a032 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
@@ -325,15 +325,15 @@
 
 
 private Server
-ServerManager.master
+ActiveMasterManager.master
 
 
 private Server
-ActiveMasterManager.master
+ServerManager.master
 
 
-private Server
-RegionStateStore.server
+protected Server
+BulkAssigner.server
 
 
 private Server
@@ -341,11 +341,11 @@
 
 
 private Server
-SplitLogManager.server
+RegionStateStore.server
 
 
-protected Server
-BulkAssigner.server
+private Server
+SplitLogManager.server
 
 
 
@@ -504,19 +504,19 @@
 
 
 private Server
-HeapMemoryManager.server
+SplitTransactionImpl.server
 
 
 private Server
-SplitTransactionImpl.server
+SplitTransactionImpl.DaughterOpener.server
 
 
 private Server
-SplitTransactionImpl.DaughterOpener.server
+LogRoller.server
 
 
 private Server
-LogRoller.server
+HeapMemoryManager.server
 
 
 
@@ -529,21 +529,21 @@
 
 
 Server
-RegionMergeTransactionImpl.getServer()
+RegionMergeTransaction.getServer()
+Get the Server running the transaction or rollback
+
 
 
 Server
-SplitTransactionImpl.getServer()
+RegionMergeTransactionImpl.getServer()
 
 
 Server
-SplitTransaction.getServer()
-Get the Server running the transaction or rollback
-
+SplitTransactionImpl.getServer()
 
 
 Server
-RegionMergeTransaction.getServer()
+SplitTransaction.getServer()
 Get the Server running the transaction or rollback
 
 
@@ -581,15 +581,24 @@
 
 
 Region
+RegionMergeTransaction.execute(Serverserver,
+  RegionServerServicesservices)
+Deprecated.
+use #execute(Server, RegionServerServices, 
User)
+
+
+
+
+Region
 RegionMergeTransactionImpl.execute(Serverserver,
   RegionServerServicesservices)
 
-
+
 PairOfSameTypeRegion
 SplitTransactionImpl.execute(Serverserver,
   RegionServerServicesservices)
 
-
+
 PairOfSameTypeRegion
 SplitTransaction.execute(Serverserver,
   RegionServerServicesservices)
@@ -598,28 +607,27 @@
 
 
 
-
+
 Region
-RegionMergeTransaction.execute(Serverserver,
-  RegionServerServicesservices)
-Deprecated.
-use #execute(Server, RegionServerServices, 
User)
-
+RegionMergeTransaction.execute(Serverserver,
+  RegionServerServicesservices,
+  Useruser)
+Run the transaction.
 
 
-
+
 Region
 RegionMergeTransactionImpl.execute(Serverserver,
   RegionServerServicesservices,
   Useruser)
 
-
+
 PairOfSameTypeRegion
 SplitTransactionImpl.execute(Serverserver,
   RegionServerServicesservices,
   Useruser)
 
-
+
 PairOfSameTypeRegion
 SplitTransaction.execute(Serverserver,
   RegionServerServicesservices,
@@ -627,14 +635,6 @@
 Run the transaction.
 
 
-
-Region
-RegionMergeTransaction.execute(Serverserver,
-  RegionServerServicesservices,
-  Useruser)
-Run the transaction.
-
-
 
 void
 ReplicationService.initialize(Serverrs,
@@ -670,55 +670,55 @@
 
 
 boolean
-RegionMergeTransactionImpl.rollback(Serverserver,
-RegionServerServicesservices)
+RegionMergeTransaction.rollback(Serverserver,
+RegionServerServicesservices)
+Deprecated.
+use #rollback(Server, RegionServerServices, 
User)
+
+
 
 
 boolean
-SplitTransactionImpl.rollback(Serverserver,
+RegionMergeTransactionImpl.rollback(Serverserver,
 RegionServerServicesservices)
 
 
 boolean
-SplitTransaction.rollback(Serverserver,
-RegionServerServicesservices)
-Deprecated.
-use #rollback(Server, RegionServerServices, User); as of 
1.0.2, remove in 3.0
-
-
+SplitTransactionImpl.rollback(Serverserver,
+RegionServerServicesservices)
 
 
 boolean
-RegionMergeTransaction.rollback(Serverserver,
+SplitTransaction.rollback(Serverserver,
 RegionServerServicesservices)
 Deprecated.
-use #rollback(Server, RegionServerServices, 
User)
+use #rollback(Server, RegionServerServices, User); as of 
1.0.2, remove in 3.0
 
 
 
 
 boolean
-RegionMergeTransactionImpl.rollback(Serverserver,
+RegionMergeTransaction.rollback(Serverserver,
 RegionServerServicesservices,
-Useruser)
+Useruser)
+Roll back a failed transaction
+
 
 
 boolean
-SplitTransactionImpl.rollback(Serverserver,
+RegionMergeTransactionImpl.rollback(Serverserver,
 RegionServerServicesservices,
 Useruser)
 
 
 boolean
-SplitTransaction.rollback(Serverserver,
+SplitTransactionImpl.rollback(Serverserver,
 RegionServerServicesservices,
-Useruser)
-Roll back a failed transaction
-
+Useruser)
 
 
 boolean

[41/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/client/HConnection.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/HConnection.html 
b/apidocs/org/apache/hadoop/hbase/client/HConnection.html
deleted file mode 100644
index 0baf2b2..000
--- a/apidocs/org/apache/hadoop/hbase/client/HConnection.html
+++ /dev/null
@@ -1,1825 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-
-
-HConnection (Apache HBase 2.0.0-SNAPSHOT API)
-
-
-
-
-
-
-JavaScript is disabled on your browser.
-
-
-
-
-
-
-
-
-Overview
-Package
-Class
-Use
-Tree
-Deprecated
-Index
-Help
-
-
-
-
-Prev 
Class
-Next 
Class
-
-
-Frames
-No Frames
-
-
-All Classes
-
-
-
-
-
-
-
-Summary:
-Nested|
-Field|
-Constr|
-Method
-
-
-Detail:
-Field|
-Constr|
-Method
-
-
-
-
-
-
-
-
-org.apache.hadoop.hbase.client
-Interface HConnection
-
-
-
-
-
-
-All Superinterfaces:
-org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javase/7/docs/api/java/lang/AutoCloseable.html?is-external=true;
 title="class or interface in java.lang">AutoCloseable, http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true;
 title="class or interface in java.io">Closeable, Connection
-
-
-Deprecated.
-in favor of Connection 
and ConnectionFactory
-
-
-@InterfaceAudience.Public
-@InterfaceStability.Stable
-http://docs.oracle.com/javase/7/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-public interface HConnection
-extends Connection
-A cluster connection.  Knows how to find the master, locate 
regions out on the cluster,
- keeps a cache of locations and then knows how to re-calibrate after they 
move.  You need one
- of these to talk to your HBase cluster. ConnectionFactory manages 
instances of this
- class.  See it for how to get one of these.
-
- This is NOT a connection to a particular server but to ALL servers in the 
cluster.  Individual
- connections are managed at a lower level.
-
- HConnections are used by HTable mostly but also by
- HBaseAdmin, and MetaTableLocator.
-See Also:ConnectionFactory
-
-
-
-
-
-
-
-
-
-
-
-Field Summary
-
-Fields
-
-Modifier and Type
-Field and Description
-
-
-static http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-HBASE_CLIENT_CONNECTION_IMPL
-Deprecated.
-Key for configuration in Configuration whose value is the 
class we implement making a
- new HConnection instance.
-
-
-
-
-
-
-
-
-
-
-Method Summary
-
-Methods
-
-Modifier and Type
-Method and Description
-
-
-void
-clearCaches(ServerNamesn)
-Deprecated.
-internal method, do not use thru HConnection
-
-
-
-
-void
-clearRegionCache()
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-void
-clearRegionCache(byte[]tableName)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-void
-clearRegionCache(TableNametableName)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-void
-deleteCachedRegionLocation(HRegionLocationlocation)
-Deprecated.
-internal method, do not use thru HConnection
-
-
-
-
-Admin
-getAdmin()
-Deprecated.
-Retrieve an Admin implementation to administer an HBase 
cluster.
-
-
-
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-getAdmin(ServerNameserverName)
-Deprecated.
-internal method, do not use thru HConnection
-
-
-
-
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-getAdmin(ServerNameserverName,
-booleangetMaster)
-Deprecated.
-You can pass master flag but nothing special is 
done.
-
-
-
-
-org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService.BlockingInterface
-getClient(ServerNameserverName)
-Deprecated.
-internal method, do not use thru HConnection
-
-
-
-
-org.apache.hadoop.conf.Configuration
-getConfiguration()
-Deprecated.
-
-
-
-int
-getCurrentNrHRS()
-Deprecated.
-This method will be changed from public to package 
protected.
-
-
-
-
-HTableDescriptor
-getHTableDescriptor(byte[]tableName)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-HTableDescriptor
-getHTableDescriptor(TableNametableName)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-HTableDescriptor[]
-getHTableDescriptors(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class 
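
Since this message deletes the public HConnection javadoc outright, the practical replacements for its entry points all live on Connection. A compact sketch of the mapping, assuming an existing Connection named connection:

    // was HConnection.getAdmin()
    Admin admin = connection.getAdmin();
    // was HConnection.getTable("t1") / getTable(TableName)
    Table table = connection.getTable(TableName.valueOf("t1"));
    // was the HConnection region-location/cache calls; RegionLocator is the public API
    RegionLocator locator = connection.getRegionLocator(TableName.valueOf("t1"));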

[44/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 1fd6859..9ed6623 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -1015,15 +1015,15 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 
 
+Increment
+Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
+
+
 Mutation
 Mutation.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 Method for setting the put's familyMap
 
 
-
-Increment
-Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
-
 
 Delete
 Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
@@ -1043,11 +1043,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-abstract Cell
-Filter.getNextCellHint(CellcurrentCell)
-If the filter returns the match code SEEK_NEXT_USING_HINT, 
then it should also tell which is
- the next key it must seek to.
-
+Cell
+MultiRowRangeFilter.getNextCellHint(CellcurrentKV)
 
 
 Cell
@@ -1055,55 +1052,58 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Cell
-MultipleColumnPrefixFilter.getNextCellHint(Cellcell)
+FuzzyRowFilter.getNextCellHint(CellcurrentCell)
 
 
 Cell
-FilterList.getNextCellHint(CellcurrentCell)
+MultipleColumnPrefixFilter.getNextCellHint(Cellcell)
 
 
 Cell
-ColumnPaginationFilter.getNextCellHint(Cellcell)
+TimestampsFilter.getNextCellHint(CellcurrentCell)
+Pick the next cell that the scanner should seek to.
+
 
 
 Cell
-FuzzyRowFilter.getNextCellHint(CellcurrentCell)
+FilterList.getNextCellHint(CellcurrentCell)
 
 
 Cell
-ColumnRangeFilter.getNextCellHint(Cellcell)
+ColumnPaginationFilter.getNextCellHint(Cellcell)
 
 
 Cell
-TimestampsFilter.getNextCellHint(CellcurrentCell)
-Pick the next cell that the scanner should seek to.
-
+ColumnRangeFilter.getNextCellHint(Cellcell)
 
 
-Cell
-MultiRowRangeFilter.getNextCellHint(CellcurrentKV)
-
-
 abstract Cell
-Filter.transformCell(Cellv)
-Give the filter a chance to transform the passed 
KeyValue.
+Filter.getNextCellHint(CellcurrentCell)
+If the filter returns the match code SEEK_NEXT_USING_HINT, 
then it should also tell which is
+ the next key it must seek to.
 
 
+
+Cell
+SkipFilter.transformCell(Cellv)
+
 
 Cell
 WhileMatchFilter.transformCell(Cellv)
 
 
 Cell
-FilterList.transformCell(Cellc)
+KeyOnlyFilter.transformCell(Cellcell)
 
 
 Cell
-KeyOnlyFilter.transformCell(Cellcell)
+FilterList.transformCell(Cellc)
 
 
-Cell
-SkipFilter.transformCell(Cellv)
+abstract Cell
+Filter.transformCell(Cellv)
+Give the filter a chance to transform the passed 
KeyValue.
+
 
 
 
@@ -1147,78 +1147,78 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 MultipleColumnPrefixFilter.filterColumn(Cellcell)
 
 
-abstract Filter.ReturnCode
-Filter.filterKeyValue(Cellv)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+Filter.ReturnCode
+MultiRowRangeFilter.filterKeyValue(Cellignored)
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cellcell)
+DependentColumnFilter.filterKeyValue(Cellc)
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterKeyValue(Cellv)
+RandomRowFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-PrefixFilter.filterKeyValue(Cellv)
+ColumnPrefixFilter.filterKeyValue(Cellcell)
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cellv)
+SkipFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
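
The getNextCellHint(Cell) rows reshuffled above belong to filters that return SEEK_NEXT_USING_HINT from filterKeyValue() and then tell the scanner where to jump next. TimestampsFilter is the easiest one to see in action; a small usage sketch, assuming an open Table named table:

    // Only cells written at exactly these two timestamps are returned; the filter's
    // getNextCellHint() lets the scanner seek past everything else.
    Scan scan = new Scan();
    scan.setFilter(new TimestampsFilter(Arrays.asList(1000L, 2000L)));
    try (ResultScanner scanner = table.getScanner(scan)) {
      for (Result result : scanner) {
        // process result
      }
    }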

[38/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.html 
b/apidocs/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.html
index 321f4df..6976228 100644
--- a/apidocs/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.html
+++ b/apidocs/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.html
@@ -101,7 +101,7 @@
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class KeyStoreKeyProvider
+public class KeyStoreKeyProvider
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements KeyProvider
 A basic KeyProvider that can resolve keys from a protected 
KeyStore file
@@ -268,7 +268,7 @@ implements 
 
 store
-protectedhttp://docs.oracle.com/javase/7/docs/api/java/security/KeyStore.html?is-external=true;
 title="class or interface in java.security">KeyStore store
+protectedhttp://docs.oracle.com/javase/7/docs/api/java/security/KeyStore.html?is-external=true;
 title="class or interface in java.security">KeyStore store
 
 
 
@@ -277,7 +277,7 @@ implements 
 
 password
-protectedchar[] password
+protectedchar[] password
 
 
 
@@ -286,7 +286,7 @@ implements 
 
 passwordFile
-protectedhttp://docs.oracle.com/javase/7/docs/api/java/util/Properties.html?is-external=true;
 title="class or interface in java.util">Properties passwordFile
+protectedhttp://docs.oracle.com/javase/7/docs/api/java/util/Properties.html?is-external=true;
 title="class or interface in java.util">Properties passwordFile
 
 
 
@@ -303,7 +303,7 @@ implements 
 
 KeyStoreKeyProvider
-publicKeyStoreKeyProvider()
+publicKeyStoreKeyProvider()
 
 
 
@@ -320,7 +320,7 @@ implements 
 
 processParameter
-protectedvoidprocessParameter(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
+protectedvoidprocessParameter(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringvalue)
  throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Throws:
@@ -333,7 +333,7 @@ implements 
 
 processParameters
-protectedvoidprocessParameters(http://docs.oracle.com/javase/7/docs/api/java/net/URI.html?is-external=true;
 title="class or interface in java.net">URIuri)
+protectedvoidprocessParameters(http://docs.oracle.com/javase/7/docs/api/java/net/URI.html?is-external=true;
 title="class or interface in java.net">URIuri)
   throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Throws:
 http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -345,7 +345,7 @@ implements 
 
 load
-protectedvoidload(http://docs.oracle.com/javase/7/docs/api/java/net/URI.html?is-external=true;
 title="class or interface in java.net">URIuri)
+protectedvoidload(http://docs.oracle.com/javase/7/docs/api/java/net/URI.html?is-external=true;
 title="class or interface in java.net">URIuri)
  throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Throws:
 http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -357,7 +357,7 @@ implements 
 
 init
-publicvoidinit(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringparams)
+publicvoidinit(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringparams)
 Description copied from interface:KeyProvider
 Initialize the key provider
 
@@ -372,7 +372,7 @@ implements 
 
 getAliasPassword
-protectedchar[]getAliasPassword(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringalias)
+protectedchar[]getAliasPassword(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringalias)
 
 
 
@@ -381,7 +381,7 @@ implements 
 
 getKey
-publichttp://docs.oracle.com/javase/7/docs/api/java/security/Key.html?is-external=true;
 title="class or interface in java.security">KeygetKey(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or 
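
For context, KeyStoreKeyProvider is normally wired in through configuration rather than instantiated by hand. A sketch, assuming the hbase.crypto.keyprovider / hbase.crypto.keyprovider.parameters keys used by HBase transparent encryption, with a purely hypothetical keystore path and password:

    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.crypto.keyprovider",
        "org.apache.hadoop.hbase.io.crypto.KeyStoreKeyProvider");
    // scheme = keystore type, path = keystore file, query = options (placeholders below)
    conf.set("hbase.crypto.keyprovider.parameters",
        "jceks:///etc/hbase/conf/hbase.jks?password=changeit");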

[19/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
index 1a0f53a..f38febf 100644
--- a/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
@@ -101,7 +101,7 @@
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class HColumnDescriptor
+public class HColumnDescriptor
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableHColumnDescriptor
 An HColumnDescriptor contains information about a column 
family such as the
@@ -861,7 +861,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 COLUMN_DESCRIPTOR_VERSION
-private static finalbyte COLUMN_DESCRIPTOR_VERSION
+private static finalbyte COLUMN_DESCRIPTOR_VERSION
 See Also:Constant
 Field Values
 
 
@@ -871,7 +871,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 IN_MEMORY_COMPACTION
-private static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String IN_MEMORY_COMPACTION
+private static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String IN_MEMORY_COMPACTION
 See Also:Constant
 Field Values
 
 
@@ -881,7 +881,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 COMPRESSION
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String COMPRESSION
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String COMPRESSION
 See Also:Constant
 Field Values
 
 
@@ -891,7 +891,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 COMPRESSION_COMPACT
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String COMPRESSION_COMPACT
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String COMPRESSION_COMPACT
 See Also:Constant
 Field Values
 
 
@@ -901,7 +901,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 ENCODE_ON_DISK
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String ENCODE_ON_DISK
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String ENCODE_ON_DISK
 See Also:Constant
 Field Values
 
 
@@ -911,7 +911,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 DATA_BLOCK_ENCODING
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String DATA_BLOCK_ENCODING
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String DATA_BLOCK_ENCODING
 See Also:Constant
 Field Values
 
 
@@ -921,7 +921,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 BLOCKCACHE
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String BLOCKCACHE
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String BLOCKCACHE
 Key for the BLOCKCACHE attribute.
  A more exact name would be CACHE_DATA_ON_READ because this flag sets whether 
or not we
  cache DATA blocks.  We always cache INDEX and BLOOM blocks; caching these 
blocks cannot be
@@ -935,7 +935,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 CACHE_DATA_ON_WRITE
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CACHE_DATA_ON_WRITE
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CACHE_DATA_ON_WRITE
 See Also:Constant
 Field Values
 
 
@@ -945,7 +945,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 CACHE_INDEX_ON_WRITE
-public static 
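
The constants being re-anchored here (COMPRESSION, DATA_BLOCK_ENCODING, BLOCKCACHE, CACHE_DATA_ON_WRITE, CACHE_INDEX_ON_WRITE, ...) are the attribute keys behind the usual HColumnDescriptor setters. A small sketch of a column family that exercises them; the family name and the Snappy/FAST_DIFF choices are illustrative only:

    HColumnDescriptor family = new HColumnDescriptor("cf");
    family.setCompressionType(Compression.Algorithm.SNAPPY);   // COMPRESSION
    family.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);  // DATA_BLOCK_ENCODING
    family.setBlockCacheEnabled(true);                         // BLOCKCACHE: cache DATA blocks on read
    HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf("my_table"));
    tableDesc.addFamily(family);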

[31/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/src-html/org/apache/hadoop/hbase/client/HConnection.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/HConnection.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/HConnection.html
deleted file mode 100644
index b403eb8..000
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/HConnection.html
+++ /dev/null
@@ -1,698 +0,0 @@
-http://www.w3.org/TR/html4/loose.dtd;>
-
-
-Source code
-
-
-
-
-001/**
-002 *
-003 * Licensed to the Apache Software 
Foundation (ASF) under one
-004 * or more contributor license 
agreements.  See the NOTICE file
-005 * distributed with this work for 
additional information
-006 * regarding copyright ownership.  The 
ASF licenses this file
-007 * to you under the Apache License, 
Version 2.0 (the
-008 * "License"); you may not use this file 
except in compliance
-009 * with the License.  You may obtain a 
copy of the License at
-010 *
-011 * 
http://www.apache.org/licenses/LICENSE-2.0
-012 *
-013 * Unless required by applicable law or 
agreed to in writing, software
-014 * distributed under the License is 
distributed on an "AS IS" BASIS,
-015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-016 * See the License for the specific 
language governing permissions and
-017 * limitations under the License.
-018 */
-019package org.apache.hadoop.hbase.client;
-020
-021import java.io.IOException;
-022import java.util.List;
-023import 
java.util.concurrent.ExecutorService;
-024import 
org.apache.hadoop.conf.Configuration;
-025import 
org.apache.hadoop.hbase.HRegionLocation;
-026import 
org.apache.hadoop.hbase.HTableDescriptor;
-027import 
org.apache.hadoop.hbase.MasterNotRunningException;
-028import 
org.apache.hadoop.hbase.ServerName;
-029import 
org.apache.hadoop.hbase.TableName;
-030import 
org.apache.hadoop.hbase.ZooKeeperConnectionException;
-031import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-032import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-033import 
org.apache.hadoop.hbase.client.coprocessor.Batch;
-034import 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService;
-035import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService;
-036import 
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService;
-037
-038/**
-039 * A cluster connection.  Knows how to 
find the master, locate regions out on the cluster,
-040 * keeps a cache of locations and then 
knows how to re-calibrate after they move.  You need one
-041 * of these to talk to your HBase 
cluster. {@link ConnectionFactory} manages instances of this
-042 * class.  See it for how to get one of 
these.
-043 *
-044 * pThis is NOT a connection to a 
particular server but to ALL servers in the cluster.  Individual
-045 * connections are managed at a lower 
level.
-046 *
-047 * pHConnections are used by 
{@link HTable} mostly but also by
-048 * {@link HBaseAdmin}, and {@link 
org.apache.hadoop.hbase.zookeeper.MetaTableLocator}. 
-049 *
-050 * @see ConnectionFactory
-051 * @deprecated in favor of {@link 
Connection} and {@link ConnectionFactory}
-052 */
-053@InterfaceAudience.Public
-054@InterfaceStability.Stable
-055@Deprecated
-056public interface HConnection extends 
Connection {
-057  /**
-058   * Key for configuration in 
Configuration whose value is the class we implement making a
-059   * new HConnection instance.
-060   */
-061  public static final String 
HBASE_CLIENT_CONNECTION_IMPL = "hbase.client.connection.impl";
-062
-063  /**
-064   * @return Configuration instance being 
used by this HConnection instance.
-065   */
-066  @Override
-067  Configuration getConfiguration();
-068
-069  /**
-070   * Retrieve an HTableInterface 
implementation for access to a table.
-071   * The returned HTableInterface is not 
thread safe, a new instance should
-072   * be created for each using thread.
-073   * This is a lightweight operation, 
pooling or caching of the returned HTableInterface
-074   * is neither required nor desired.
-075   * (created with {@link 
ConnectionFactory#createConnection(Configuration)}).
-076   * @param tableName
-077   * @return an HTable to use for 
interactions with this table
-078   */
-079  public HTableInterface getTable(String 
tableName) throws IOException;
-080
-081  /**
-082   * Retrieve an HTableInterface 
implementation for access to a table.
-083   * The returned HTableInterface is not 
thread safe, a new instance should
-084   * be created for each using thread.
-085   * This is a lightweight operation, 
pooling or caching of the returned HTableInterface
-086   * is neither required nor desired.
-087   * (created with {@link 
ConnectionFactory#createConnection(Configuration)}).
-088   * @param tableName
-089   * @return an HTable to use for 
interactions with this table
-090   */
-091  public HTableInterface 
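
The deleted javadoc above spells out the threading contract: the connection is shared, but the table handle returned by getTable() is lightweight, not thread safe, and should be created per use rather than pooled. The same contract carries over to Connection/Table; a sketch, assuming a shared Connection named connection:

    // Each worker creates its own cheap Table from the shared Connection and closes it.
    try (Table t = connection.getTable(TableName.valueOf("my_table"))) {
      Result r = t.get(new Get(Bytes.toBytes("row-1")));
      // ... use r ...
    }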

[36/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
index 58b0e89..2f3ad05 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
@@ -116,100 +116,100 @@
 
 
 
-byte[]
-OrderedBlob.decode(PositionedByteRangesrc)
+T
+DataType.decode(PositionedByteRangesrc)
+Read an instance of T from the buffer 
src.
+
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
-OrderedInt32.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Number.html?is-external=true;
 title="class or interface in java.lang">Number
+OrderedNumeric.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-RawString.decode(PositionedByteRangesrc)
-
-
 http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
-OrderedInt64.decode(PositionedByteRangesrc)
-
-
-http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true;
 title="class or interface in java.lang">Double
-RawDouble.decode(PositionedByteRangesrc)
+RawLong.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
-RawInteger.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true;
 title="class or interface in java.lang">Short
+RawShort.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true;
 title="class or interface in java.lang">Double
-OrderedFloat64.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object[]
+Struct.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true;
 title="class or interface in java.lang">Float
-RawFloat.decode(PositionedByteRangesrc)
-
-
 T
 FixedLengthWrapper.decode(PositionedByteRangesrc)
 
+
+http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true;
 title="class or interface in java.lang">Byte
+RawByte.decode(PositionedByteRangesrc)
+
 
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-OrderedString.decode(PositionedByteRangesrc)
+RawString.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Number.html?is-external=true;
 title="class or interface in java.lang">Number
-OrderedNumeric.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true;
 title="class or interface in java.lang">Byte
+OrderedInt8.decode(PositionedByteRangesrc)
 
 
-T
-TerminatedWrapper.decode(PositionedByteRangesrc)
+byte[]
+RawBytes.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true;
 title="class or interface in java.lang">Float
-OrderedFloat32.decode(PositionedByteRangesrc)
+T
+TerminatedWrapper.decode(PositionedByteRangesrc)
 
 
-byte[]
-RawBytes.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+OrderedString.decode(PositionedByteRangesrc)
 
 
 http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
-RawLong.decode(PositionedByteRangesrc)
+OrderedInt64.decode(PositionedByteRangesrc)
 
 
 http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true;
 title="class or interface in java.lang">Short
-RawShort.decode(PositionedByteRangesrc)
+OrderedInt16.decode(PositionedByteRangesrc)
 
 
 byte[]
 OrderedBlobVar.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true;
 title="class or interface in java.lang">Byte
-RawByte.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer
+RawInteger.decode(PositionedByteRangesrc)
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true;
 title="class or interface in java.lang">Byte
-OrderedInt8.decode(PositionedByteRangesrc)
+http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true;
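
These rows are the DataType.decode(PositionedByteRange) implementations; each type reads an instance of T from the buffer and advances its position. A rough round-trip sketch for OrderedInt64, assuming SimplePositionedMutableByteRange as the PositionedByteRange implementation and the fixed 9-byte OrderedBytes int64 encoding:

    PositionedByteRange buf = new SimplePositionedMutableByteRange(9);
    OrderedInt64.ASCENDING.encode(buf, 42L);              // write at the current position
    buf.setPosition(0);                                   // rewind before reading back
    Long decoded = OrderedInt64.ASCENDING.decode(buf);    // 42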
 

[13/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
index a449504..9188477 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
@@ -867,7 +867,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-AbstractRegionServerCallable.getHRegionInfo()
+ScannerCallableWithReplicas.getHRegionInfo()
 
 
 HRegionInfo
@@ -875,7 +875,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-ScannerCallableWithReplicas.getHRegionInfo()
+AbstractRegionServerCallable.getHRegionInfo()
 
 
 private HRegionInfo
@@ -1113,8 +1113,10 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-BaseMasterObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx,
-HRegionInforegionInfo)
+MasterObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx,
+HRegionInforegionInfo)
+Called after the region assignment has been requested.
+
 
 
 void
@@ -1123,16 +1125,16 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-MasterObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx,
-HRegionInforegionInfo)
-Called after the region assignment has been requested.
-
+BaseMasterObserver.postAssign(ObserverContextMasterCoprocessorEnvironmentctx,
+HRegionInforegionInfo)
 
 
 void
-BaseMasterObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx,
+MasterObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx,
 HTableDescriptordesc,
-HRegionInfo[]regions)
+HRegionInfo[]regions)
+Called after the createTable operation has been 
requested.
+
 
 
 void
@@ -1142,17 +1144,17 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-MasterObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx,
+BaseMasterObserver.postCompletedCreateTableAction(ObserverContextMasterCoprocessorEnvironmentctx,
 HTableDescriptordesc,
-HRegionInfo[]regions)
-Called after the createTable operation has been 
requested.
-
+HRegionInfo[]regions)
 
 
 void
-BaseMasterObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx,
+MasterObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx,
   HTableDescriptordesc,
-  HRegionInfo[]regions)
+  HRegionInfo[]regions)
+Called after the createTable operation has been 
requested.
+
 
 
 void
@@ -1162,21 +1164,19 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-MasterObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx,
+BaseMasterObserver.postCreateTable(ObserverContextMasterCoprocessorEnvironmentctx,
   HTableDescriptordesc,
-  HRegionInfo[]regions)
-Called after the createTable operation has been 
requested.
-
+  HRegionInfo[]regions)
 
 
 void
-BaseMasterObserver.postCreateTableHandler(ObserverContextMasterCoprocessorEnvironmentctx,
+MasterObserver.postCreateTableHandler(ObserverContextMasterCoprocessorEnvironmentctx,
 HTableDescriptordesc,
 HRegionInfo[]regions)
 Deprecated.
 As of release 2.0.0, this will be removed in HBase 3.0.0
(https://issues.apache.org/jira/browse/HBASE-15575;>HBASE-15575).
-   Use BaseMasterObserver.postCompletedCreateTableAction(ObserverContext,
 HTableDescriptor, HRegionInfo[])
+   Use MasterObserver.postCompletedCreateTableAction(ObserverContext,
 HTableDescriptor, HRegionInfo[])
 
 
 
@@ -1190,21 +1190,23 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-MasterObserver.postCreateTableHandler(ObserverContextMasterCoprocessorEnvironmentctx,
+BaseMasterObserver.postCreateTableHandler(ObserverContextMasterCoprocessorEnvironmentctx,
 HTableDescriptordesc,
 HRegionInfo[]regions)
 Deprecated.
 As of release 2.0.0, this will be removed in HBase 3.0.0
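
The deprecation notes above redirect postCreateTableHandler to postCompletedCreateTableAction, with the signature shown in the table. A minimal sketch of a master observer on the new hook; the class name is a placeholder:

    public class TableAuditObserver extends BaseMasterObserver {
      @Override
      public void postCompletedCreateTableAction(
          ObserverContext<MasterCoprocessorEnvironment> ctx,
          HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
        // react once the create-table action has completed,
        // e.g. log desc.getTableName() and regions.length
      }
    }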

[48/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apache_hbase_reference_guide.pdfmarks
--
diff --git a/apache_hbase_reference_guide.pdfmarks 
b/apache_hbase_reference_guide.pdfmarks
index 606c8da..e311fb3 100644
--- a/apache_hbase_reference_guide.pdfmarks
+++ b/apache_hbase_reference_guide.pdfmarks
@@ -2,8 +2,8 @@
   /Author (Apache HBase Team)
   /Subject ()
   /Keywords ()
-  /ModDate (D:20160527144439)
-  /CreationDate (D:20160527144439)
+  /ModDate (D:20160531144329)
+  /CreationDate (D:20160531144329)
   /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
   /Producer ()
   /DOCINFO pdfmark

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/allclasses-frame.html
--
diff --git a/apidocs/allclasses-frame.html b/apidocs/allclasses-frame.html
index 5506a65..5a6ca27 100644
--- a/apidocs/allclasses-frame.html
+++ b/apidocs/allclasses-frame.html
@@ -116,7 +116,6 @@
 HBaseIOException
 HBaseSnapshotException
 HColumnDescriptor
-HConnection
 HConstants
 HFileOutputFormat2
 HLogInputFormat

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/allclasses-noframe.html
--
diff --git a/apidocs/allclasses-noframe.html b/apidocs/allclasses-noframe.html
index 31482f1..46a9619 100644
--- a/apidocs/allclasses-noframe.html
+++ b/apidocs/allclasses-noframe.html
@@ -116,7 +116,6 @@
 HBaseIOException
 HBaseSnapshotException
 HColumnDescriptor
-HConnection
 HConstants
 HFileOutputFormat2
 HLogInputFormat

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/constant-values.html
--
diff --git a/apidocs/constant-values.html b/apidocs/constant-values.html
index 87c46e7..dd405c1 100644
--- a/apidocs/constant-values.html
+++ b/apidocs/constant-values.html
@@ -2679,25 +2679,6 @@
 
 
 
-org.apache.hadoop.hbase.client.HConnection
-
-Modifier and Type
-Constant Field
-Value
-
-
-
-
-
-publicstaticfinalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-HBASE_CLIENT_CONNECTION_IMPL
-"hbase.client.connection.impl"
-
-
-
-
-
-
 org.apache.hadoop.hbase.client.HTableMultiplexer
 
 Modifier and Type

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/deprecated-list.html
--
diff --git a/apidocs/deprecated-list.html b/apidocs/deprecated-list.html
index 464bfc6..d78011d 100644
--- a/apidocs/deprecated-list.html
+++ b/apidocs/deprecated-list.html
@@ -65,7 +65,6 @@
 Deprecated API
 Contents
 
-Deprecated Interfaces
 Deprecated Classes
 Deprecated Exceptions
 Deprecated Fields
@@ -73,27 +72,7 @@
 Deprecated Constructors
 
 
-
-
-
-
-
-
-Deprecated Interfaces
-
-Interface and Description
-
-
-
-org.apache.hadoop.hbase.client.HConnection
-in favor of Connection and ConnectionFactory
-
-
-
-
-
-
-
+
 
 
 
@@ -232,34 +211,9 @@
 
 
 
-org.apache.hadoop.hbase.client.HConnection.clearCaches(ServerName)
-internal method, do not use thru HConnection
-
-
-
-org.apache.hadoop.hbase.client.HConnection.clearRegionCache()
-internal method, do not use through HConnection
-
-
-
-org.apache.hadoop.hbase.client.HConnection.clearRegionCache(byte[])
-internal method, do not use through HConnection
-
-
-
-org.apache.hadoop.hbase.client.HConnection.clearRegionCache(TableName)
-internal method, do not use through HConnection
-
-
-
 org.apache.hadoop.hbase.mapreduce.CellCreator.create(byte[],
 int, int, byte[], int, int, byte[], int, int, long, byte[], int, int, 
String)
 
 
-org.apache.hadoop.hbase.client.HConnection.deleteCachedRegionLocation(HRegionLocation)
-internal method, do not use thru HConnection
-
-
-
 org.apache.hadoop.hbase.client.Admin.deleteColumn(TableName,
 byte[])
 As of release 2.0.0.
  (https://issues.apache.org/jira/browse/HBASE-1989;>HBASE-1989).
@@ -267,36 +221,21 @@
  Use Admin.deleteColumnFamily(TableName,
 byte[])}.
 
 
-
+
 org.apache.hadoop.hbase.rest.client.RemoteHTable.exists(List)
 
-
+
 org.apache.hadoop.hbase.filter.Filter.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this will be removed in HBase 3.0.0.
  Instead use Filter.filterRowKey(Cell)
 
 
-
-org.apache.hadoop.hbase.client.HConnection.getAdmin(ServerName)
-internal method, do not use thru HConnection
-
-
 
-org.apache.hadoop.hbase.client.HConnection.getAdmin(ServerName,
 boolean)
-You can pass master flag but nothing special is 
done.
-
-
-
 org.apache.hadoop.hbase.client.Admin.getAlterStatus(byte[])
 Since 2.0.0. Will be removed in 3.0.0. Use Admin.getAlterStatus(TableName)
  instead.
 
 
-
-org.apache.hadoop.hbase.client.HConnection.getClient(ServerName)
-internal method, do not use thru HConnection
-
-
 
 

[28/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
index cac98c7..74c422b 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.html
@@ -452,7 +452,7 @@
 444  private void validateFamiliesInHFiles(Table table, Deque<LoadQueueItem> queue)
 445      throws IOException {
 446    Collection<HColumnDescriptor> families = table.getTableDescriptor().getFamilies();
-447    List<String> familyNames = new ArrayList<String>(families.size());
+447    List<String> familyNames = new ArrayList<>(families.size());
 448    for (HColumnDescriptor family : families) {
 449      familyNames.add(family.getNameAsString());
 450    }
@@ -509,7 +509,7 @@
 501      ExecutorService pool, Deque<LoadQueueItem> queue,
 502      final Multimap<ByteBuffer, LoadQueueItem> regionGroups) throws IOException {
 503    // atomically bulk load the groups.
-504    Set<Future<List<LoadQueueItem>>> loadingFutures = new HashSet<Future<List<LoadQueueItem>>>();
+504    Set<Future<List<LoadQueueItem>>> loadingFutures = new HashSet<>();
 505    for (Entry<ByteBuffer, ? extends Collection<LoadQueueItem>> e: regionGroups.asMap().entrySet()){
 506      final byte[] first = e.getKey().array();
 507      final Collection<LoadQueueItem> lqis = e.getValue();

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
index 18172d6..1f612d6 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
@@ -29,278 +29,279 @@
 021import java.io.IOException;
 022import java.util.Collections;
 023import java.util.List;
-024
-025import org.apache.commons.logging.Log;
-026import 
org.apache.commons.logging.LogFactory;
-027import 
org.apache.hadoop.conf.Configurable;
-028import 
org.apache.hadoop.conf.Configuration;
-029import 
org.apache.hadoop.hbase.KeyValue;
-030import 
org.apache.hadoop.hbase.TableName;
-031import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-032import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-033import 
org.apache.hadoop.hbase.client.Connection;
-034import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-035import 
org.apache.hadoop.hbase.client.RegionLocator;
-036import 
org.apache.hadoop.hbase.client.Scan;
-037import 
org.apache.hadoop.hbase.util.Bytes;
-038import 
org.apache.hadoop.mapreduce.InputSplit;
-039import 
org.apache.hadoop.mapreduce.JobContext;
-040import 
org.apache.hadoop.hbase.util.Pair;
-041import org.apache.hadoop.mapreduce.Job;
-042import 
org.apache.hadoop.util.StringUtils;
-043
-044/**
-045 * Convert HBase tabular data into a 
format that is consumable by Map/Reduce.
-046 */
-047@InterfaceAudience.Public
-048@InterfaceStability.Stable
-049public class TableInputFormat extends 
TableInputFormatBase
-050implements Configurable {
-051
-052  @SuppressWarnings("hiding")
-053  private static final Log LOG = 
LogFactory.getLog(TableInputFormat.class);
-054
-055  /** Job parameter that specifies the 
input table. */
-056  public static final String INPUT_TABLE 
= "hbase.mapreduce.inputtable";
-057  /**
-058   * If specified, use start keys of this 
table to split.
-059   * This is useful when you are 
preparing data for bulkload.
-060   */
-061  private static final String SPLIT_TABLE 
= "hbase.mapreduce.splittable";
-062  /** Base-64 encoded scanner. All other 
SCAN_ confs are ignored if this is specified.
-063   * See {@link 
TableMapReduceUtil#convertScanToString(Scan)} for more details.
-064   */
-065  public static final String SCAN = 
"hbase.mapreduce.scan";
-066  /** Scan start row */
-067  public static final String 
SCAN_ROW_START = "hbase.mapreduce.scan.row.start";
-068  /** Scan stop row */
-069  public static final String 
SCAN_ROW_STOP = "hbase.mapreduce.scan.row.stop";
-070  /** Column Family to Scan */
-071  public static final String 
SCAN_COLUMN_FAMILY = "hbase.mapreduce.scan.column.family";
-072  /** Space delimited list of columns and 
column families to scan. */
-073  public static final String SCAN_COLUMNS 
= "hbase.mapreduce.scan.columns";
-074  /** The timestamp used to filter 
columns with a specific timestamp. */
-075  public static final String 
SCAN_TIMESTAMP = "hbase.mapreduce.scan.timestamp";
-076  /** The starting timestamp used to 
filter 
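
The TableInputFormat source in the diff above defines the configuration keys (hbase.mapreduce.inputtable, hbase.mapreduce.scan.row.start, and so on) that drive a scan-backed MapReduce input. Below is a hedged sketch of wiring a few of those keys into a Job; the table name, row range, column family, and job name are illustrative only, not part of the commit.

// Sketch only: values such as "mytable", "f", and the row range are assumptions.
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.mapreduce.Job;

public class TableInputFormatSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Keys are the public constants shown in the source above.
    conf.set(TableInputFormat.INPUT_TABLE, "mytable");
    conf.set(TableInputFormat.SCAN_COLUMN_FAMILY, "f");
    conf.set(TableInputFormat.SCAN_ROW_START, "row-000");
    conf.set(TableInputFormat.SCAN_ROW_STOP, "row-999");

    Job job = Job.getInstance(conf, "scan-mytable");
    job.setInputFormatClass(TableInputFormat.class);
    // A real job would also set a mapper, an output format, and then call
    // job.waitForCompletion(true); those steps are omitted here.
  }
}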

[42/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/client/Connection.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Connection.html 
b/apidocs/org/apache/hadoop/hbase/client/Connection.html
index cc0704f..46ea826 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Connection.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Connection.html
@@ -89,15 +89,11 @@
 All Superinterfaces:
 org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javase/7/docs/api/java/lang/AutoCloseable.html?is-external=true;
 title="class or interface in java.lang">AutoCloseable, http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true;
 title="class or interface in java.io">Closeable
 
-
-All Known Subinterfaces:
-HConnection
-
 
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public interface Connection
+public interface Connection
 extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true;
 title="class or interface in java.io">Closeable
 A cluster connection encapsulating lower level individual 
connections to actual servers and
  a connection to zookeeper. Connections are instantiated through the ConnectionFactory
@@ -114,9 +110,7 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
  Table and 
Admin 
instances, on the other hand, are light-weight and are not
  thread-safe.  Typically, a single connection per client application is 
instantiated and every
  thread will obtain its own Table instance. Caching or pooling of Table and 
Admin
- is not recommended.
-
- This class replaces HConnection, which is now 
deprecated.
+ is not recommended.
 Since:
   0.99.0
 See Also:ConnectionFactory
@@ -218,7 +212,7 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 getConfiguration
-org.apache.hadoop.conf.ConfigurationgetConfiguration()
+org.apache.hadoop.conf.ConfigurationgetConfiguration()
 Returns:Configuration instance 
being used by this Connection instance.
 
 
@@ -228,7 +222,7 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 getTable
-TablegetTable(TableNametableName)
+TablegetTable(TableNametableName)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Retrieve a Table implementation for accessing a table.
  The returned Table is not thread safe, a new instance should be created for 
each using thread.
@@ -253,7 +247,7 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 getTable
-TablegetTable(TableNametableName,
+TablegetTable(TableNametableName,
  http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorServicepool)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Retrieve a Table implementation for accessing a table.
@@ -279,7 +273,7 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 getBufferedMutator
-BufferedMutatorgetBufferedMutator(TableNametableName)
+BufferedMutatorgetBufferedMutator(TableNametableName)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
  Retrieve a BufferedMutator for performing 
client-side buffering of writes. The
@@ -305,7 +299,7 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 getBufferedMutator
-BufferedMutatorgetBufferedMutator(BufferedMutatorParamsparams)
+BufferedMutatorgetBufferedMutator(BufferedMutatorParamsparams)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Retrieve a BufferedMutator for performing 
client-side buffering of writes. The
  BufferedMutator returned by 
this method is thread-safe. This object can be used for
@@ -323,7 +317,7 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 getRegionLocator
-RegionLocatorgetRegionLocator(TableNametableName)
+RegionLocatorgetRegionLocator(TableNametableName)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Retrieve a RegionLocator implementation to inspect region 
information on a table. The returned
  RegionLocator is not thread-safe, so a new instance should be created for 
each using thread.
@@ -347,7 +341,7 @@ extends org.apache.hadoop.hbase.Abortable, http://docs.oracle.com/javas
 
 
 getAdmin

[52/52] hbase-site git commit: Empty commit

2016-05-31 Thread misty
Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/7fb45f8e
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/7fb45f8e
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/7fb45f8e

Branch: refs/heads/asf-site
Commit: 7fb45f8e49056c16748012228673b679eb47dddb
Parents: db523e4
Author: Misty Stanley-Jones 
Authored: Tue May 31 10:50:05 2016 -0700
Committer: Misty Stanley-Jones 
Committed: Tue May 31 10:50:05 2016 -0700

--

--




[03/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/client/Connection.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Connection.html 
b/devapidocs/org/apache/hadoop/hbase/client/Connection.html
index 531c47f..a58328d 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Connection.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Connection.html
@@ -91,7 +91,7 @@
 
 
 All Known Subinterfaces:
-ClusterConnection, HConnection
+ClusterConnection
 
 
 All Known Implementing Classes:
@@ -101,7 +101,7 @@
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public interface Connection
+public interface Connection
 extends Abortable, http://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html?is-external=true;
 title="class or interface in java.io">Closeable
 A cluster connection encapsulating lower level individual 
connections to actual servers and
  a connection to zookeeper. Connections are instantiated through the ConnectionFactory
@@ -118,9 +118,7 @@ extends Table and 
Admin 
instances, on the other hand, are light-weight and are not
  thread-safe.  Typically, a single connection per client application is 
instantiated and every
  thread will obtain its own Table instance. Caching or pooling of Table and 
Admin
- is not recommended.
-
- This class replaces HConnection, which is now 
deprecated.
+ is not recommended.
 Since:
   0.99.0
 See Also:ConnectionFactory
@@ -222,7 +220,7 @@ extends 
 
 getConfiguration
-org.apache.hadoop.conf.ConfigurationgetConfiguration()
+org.apache.hadoop.conf.ConfigurationgetConfiguration()
 Returns:Configuration instance 
being used by this Connection instance.
 
 
@@ -232,7 +230,7 @@ extends 
 
 getTable
-TablegetTable(TableNametableName)
+TablegetTable(TableNametableName)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Retrieve a Table implementation for accessing a table.
  The returned Table is not thread safe, a new instance should be created for 
each using thread.
@@ -257,7 +255,7 @@ extends 
 
 getTable
-TablegetTable(TableNametableName,
+TablegetTable(TableNametableName,
  http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorServicepool)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Retrieve a Table implementation for accessing a table.
@@ -283,7 +281,7 @@ extends 
 
 getBufferedMutator
-BufferedMutatorgetBufferedMutator(TableNametableName)
+BufferedMutatorgetBufferedMutator(TableNametableName)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
  Retrieve a BufferedMutator for performing 
client-side buffering of writes. The
@@ -309,7 +307,7 @@ extends 
 
 getBufferedMutator
-BufferedMutatorgetBufferedMutator(BufferedMutatorParamsparams)
+BufferedMutatorgetBufferedMutator(BufferedMutatorParamsparams)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Retrieve a BufferedMutator for performing 
client-side buffering of writes. The
  BufferedMutator returned by 
this method is thread-safe. This object can be used for
@@ -327,7 +325,7 @@ extends 
 
 getRegionLocator
-RegionLocatorgetRegionLocator(TableNametableName)
+RegionLocatorgetRegionLocator(TableNametableName)
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Retrieve a RegionLocator implementation to inspect region 
information on a table. The returned
  RegionLocator is not thread-safe, so a new instance should be created for 
each using thread.
@@ -351,7 +349,7 @@ extends 
 
 getAdmin
-AdmingetAdmin()
+AdmingetAdmin()
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Retrieve an Admin implementation to administer an HBase 
cluster.
  The returned Admin is not guaranteed to be thread-safe.  A new instance 
should be created for
@@ -371,7 +369,7 @@ extends 
 
 close
-voidclose()
+voidclose()
throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Specified by:
@@ -388,7 +386,7 @@ extends 
 
 isClosed
-booleanisClosed()
+booleanisClosed()
 Returns whether the connection is closed or not.
 Returns:true if this connection 
is closed
 


[34/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html
index e7a56e9..87168fe 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HColumnDescriptor.html
@@ -32,1236 +32,1237 @@
 024import java.util.Collections;
 025import java.util.HashMap;
 026import java.util.HashSet;
-027import java.util.Map;
-028import java.util.Set;
-029
-030import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-031import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-032import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-033import 
org.apache.hadoop.hbase.exceptions.HBaseException;
-034import 
org.apache.hadoop.hbase.io.compress.Compression;
-035import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-036import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-037import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
-038import 
org.apache.hadoop.hbase.regionserver.BloomType;
-039import 
org.apache.hadoop.hbase.util.Bytes;
-040import 
org.apache.hadoop.hbase.util.PrettyPrinter;
-041import 
org.apache.hadoop.hbase.util.PrettyPrinter.Unit;
-042
+027import java.util.Locale;
+028import java.util.Map;
+029import java.util.Set;
+030
+031import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+032import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+033import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+034import 
org.apache.hadoop.hbase.exceptions.HBaseException;
+035import 
org.apache.hadoop.hbase.io.compress.Compression;
+036import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+037import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+038import 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
+039import 
org.apache.hadoop.hbase.regionserver.BloomType;
+040import 
org.apache.hadoop.hbase.util.Bytes;
+041import 
org.apache.hadoop.hbase.util.PrettyPrinter;
+042import 
org.apache.hadoop.hbase.util.PrettyPrinter.Unit;
 043
-044/**
-045 * An HColumnDescriptor contains 
information about a column family such as the
-046 * number of versions, compression 
settings, etc.
-047 *
-048 * It is used as input when creating a 
table or adding a column.
-049 */
-050@InterfaceAudience.Public
-051@InterfaceStability.Evolving
-052public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
-053  // For future backward compatibility
-054
-055  // Version  3 was when column names 
become byte arrays and when we picked up
-056  // Time-to-live feature.  Version 4 was 
when we moved to byte arrays, HBASE-82.
-057  // Version  5 was when bloom filter 
descriptors were removed.
-058  // Version  6 adds metadata as a map 
where keys and values are byte[].
-059  // Version  7 -- add new compression 
and hfile blocksize to HColumnDescriptor (HBASE-1217)
-060  // Version  8 -- reintroduction of 
bloom filters, changed from boolean to enum
-061  // Version  9 -- add data block 
encoding
-062  // Version 10 -- change metadata to 
standard type.
-063  // Version 11 -- add column family 
level configuration.
-064  private static final byte 
COLUMN_DESCRIPTOR_VERSION = (byte) 11;
-065
-066  private static final String 
IN_MEMORY_COMPACTION = "IN_MEMORY_COMPACTION";
-067
-068  // These constants are used as FileInfo 
keys
-069  public static final String COMPRESSION 
= "COMPRESSION";
-070  public static final String 
COMPRESSION_COMPACT = "COMPRESSION_COMPACT";
-071  public static final String 
ENCODE_ON_DISK = // To be removed, it is not used anymore
-072  "ENCODE_ON_DISK";
-073  public static final String 
DATA_BLOCK_ENCODING =
-074  "DATA_BLOCK_ENCODING";
-075  /**
-076   * Key for the BLOCKCACHE attribute.
-077   * A more exact name would be 
CACHE_DATA_ON_READ because this flag sets whether or not we
-078   * cache DATA blocks.  We always cache 
INDEX and BLOOM blocks; caching these blocks cannot be
-079   * disabled.
-080   */
-081  public static final String BLOCKCACHE = 
"BLOCKCACHE";
-082  public static final String 
CACHE_DATA_ON_WRITE = "CACHE_DATA_ON_WRITE";
-083  public static final String 
CACHE_INDEX_ON_WRITE = "CACHE_INDEX_ON_WRITE";
-084  public static final String 
CACHE_BLOOMS_ON_WRITE = "CACHE_BLOOMS_ON_WRITE";
-085  public static final String 
EVICT_BLOCKS_ON_CLOSE = "EVICT_BLOCKS_ON_CLOSE";
-086  /**
-087   * Key for cache data into L1 if cache 
is set up with more than one tier.
-088   * To set in the shell, do something like this:
-089   *   hbase(main):003:0> create 't',
-090   *      {NAME => 't', CONFIGURATION => {CACHE_DATA_IN_L1 => 'true'}}
-091   */
-092  public static final String 
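
As the class comment above says, an HColumnDescriptor captures per-column-family settings (versions, compression, block cache, TTL) and is used as input when creating a table or adding a column. A minimal sketch of that use follows; the table name "mytable", family "f", and the chosen settings are assumptions, not part of the commit.

// Sketch only: names and settings are illustrative.
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.io.compress.Compression;

public class ColumnFamilySketch {
  public static void main(String[] args) throws IOException {
    HColumnDescriptor family = new HColumnDescriptor("f")
        .setMaxVersions(3)                               // keep 3 versions per cell
        .setCompressionType(Compression.Algorithm.GZ)    // COMPRESSION attribute
        .setBlockCacheEnabled(true)                      // BLOCKCACHE attribute
        .setTimeToLive(7 * 24 * 60 * 60);                // TTL in seconds

    HTableDescriptor table = new HTableDescriptor(TableName.valueOf("mytable"));
    table.addFamily(family);

    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      admin.createTable(table);
    }
  }
}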

[51/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/db523e4d
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/db523e4d
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/db523e4d

Branch: refs/heads/asf-site
Commit: db523e4d8772fd31bd3fec368b01fde5921f2d28
Parents: 4f67e09
Author: jenkins 
Authored: Tue May 31 14:53:06 2016 +
Committer: Misty Stanley-Jones 
Committed: Tue May 31 10:49:47 2016 -0700

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf|  8005 +-
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/allclasses-frame.html   | 1 -
 apidocs/allclasses-noframe.html | 1 -
 apidocs/constant-values.html|19 -
 apidocs/deprecated-list.html|   305 +-
 apidocs/index-all.html  |   367 -
 .../apache/hadoop/hbase/HColumnDescriptor.html  |   260 +-
 apidocs/org/apache/hadoop/hbase/HConstants.html |40 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   | 4 +-
 apidocs/org/apache/hadoop/hbase/ServerName.html |64 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   228 +-
 .../hadoop/hbase/class-use/HRegionLocation.html |   139 -
 .../hbase/class-use/HTableDescriptor.html   |46 +-
 .../class-use/MasterNotRunningException.html|56 +-
 .../hadoop/hbase/class-use/ServerName.html  |61 +-
 .../hadoop/hbase/class-use/TableName.html   |   287 +-
 .../class-use/ZooKeeperConnectionException.html |48 +-
 .../hadoop/hbase/client/BufferedMutator.html| 2 +-
 .../apache/hadoop/hbase/client/Connection.html  |28 +-
 .../apache/hadoop/hbase/client/Consistency.html | 4 +-
 apidocs/org/apache/hadoop/hbase/client/Get.html | 4 +-
 .../apache/hadoop/hbase/client/HConnection.html |  1825 ---
 .../hadoop/hbase/client/HTableMultiplexer.html  | 4 +-
 .../hadoop/hbase/client/class-use/Admin.html| 7 -
 .../hbase/client/class-use/Connection.html  |17 -
 .../hbase/client/class-use/Consistency.html |10 +-
 .../hbase/client/class-use/Durability.html  | 8 +-
 .../hbase/client/class-use/HConnection.html |   115 -
 .../hbase/client/class-use/IsolationLevel.html  |10 +-
 .../hbase/client/class-use/RegionLocator.html   | 7 -
 .../hadoop/hbase/client/class-use/Result.html   |48 +-
 .../hadoop/hbase/client/class-use/Row.html  |52 +-
 .../hadoop/hbase/client/package-frame.html  | 1 -
 .../hadoop/hbase/client/package-summary.html|14 +-
 .../hadoop/hbase/client/package-tree.html   |20 +-
 .../hbase/filter/SubstringComparator.html   |12 +-
 .../filter/class-use/Filter.ReturnCode.html |60 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |50 +-
 .../hadoop/hbase/filter/package-tree.html   | 6 +-
 .../io/class-use/ImmutableBytesWritable.html|68 +-
 .../hadoop/hbase/io/class-use/TimeRange.html|12 +-
 .../hbase/io/crypto/KeyStoreKeyProvider.html|24 +-
 .../hbase/io/crypto/class-use/Cipher.html   |16 +-
 .../hbase/io/encoding/DataBlockEncoding.html| 4 +-
 .../apache/hadoop/hbase/mapreduce/Import.html   |34 +-
 .../hbase/mapreduce/TableInputFormat.html   |48 +-
 .../org/apache/hadoop/hbase/package-use.html|22 +-
 .../apache/hadoop/hbase/quotas/QuotaType.html   | 4 +-
 .../hbase/quotas/ThrottlingException.Type.html  | 4 +-
 .../hadoop/hbase/quotas/package-tree.html   | 2 +-
 .../apache/hadoop/hbase/util/RegionMover.html   |26 +-
 .../hadoop/hbase/util/class-use/Bytes.html  |16 +-
 .../hadoop/hbase/util/class-use/Order.html  |42 +-
 .../hadoop/hbase/util/class-use/Pair.html   | 4 +-
 .../util/class-use/PositionedByteRange.html |   380 +-
 apidocs/overview-tree.html  |39 +-
 .../apache/hadoop/hbase/HColumnDescriptor.html  |  2459 ++--
 .../org/apache/hadoop/hbase/HConstants.html |12 +-
 .../apache/hadoop/hbase/LocalHBaseCluster.html  | 4 +-
 .../org/apache/hadoop/hbase/ServerName.html |   771 +-
 .../BufferedMutator.ExceptionListener.html  | 2 +-
 .../hadoop/hbase/client/BufferedMutator.html| 2 +-
 .../apache/hadoop/hbase/client/Connection.html  |   257 +-
 .../hadoop/hbase/client/ConnectionFactory.html  | 2 +-
 .../apache/hadoop/hbase/client/HConnection.html |   698 -
 .../org/apache/hadoop/hbase/client/Table.html   | 8 +-
 .../hbase/filter/SubstringComparator.html   |   183 +-
 .../hbase/io/crypto/KeyStoreKeyProvider.html|   321 +-
 .../hbase/ipc/SyncCoprocessorRpcChannel.html| 2 +-
 .../apache/hadoop/hbase/mapreduce/Import.html   |  

[22/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/deprecated-list.html
--
diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html
index f50cd69..c7f59dc 100644
--- a/devapidocs/deprecated-list.html
+++ b/devapidocs/deprecated-list.html
@@ -85,11 +85,6 @@
 
 
 
-org.apache.hadoop.hbase.client.HConnection
-in favor of Connection and ConnectionFactory
-
-
-
 org.apache.hadoop.hbase.client.HTableInterface
 use Table 
instead
 
@@ -364,26 +359,6 @@
 
 
 
-org.apache.hadoop.hbase.client.HConnection.clearCaches(ServerName)
-internal method, do not use thru HConnection
-
-
-
-org.apache.hadoop.hbase.client.HConnection.clearRegionCache()
-internal method, do not use through HConnection
-
-
-
-org.apache.hadoop.hbase.client.HConnection.clearRegionCache(byte[])
-internal method, do not use through HConnection
-
-
-
-org.apache.hadoop.hbase.client.HConnection.clearRegionCache(TableName)
-internal method, do not use through HConnection
-
-
-
 org.apache.hadoop.hbase.regionserver.Store.compact(CompactionContext,
 ThroughputController)
 see compact(CompactionContext, ThroughputController, 
User)
 
@@ -403,11 +378,6 @@
 org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
-org.apache.hadoop.hbase.client.HConnection.deleteCachedRegionLocation(HRegionLocation)
-internal method, do not use thru HConnection
-
-
-
 org.apache.hadoop.hbase.client.Admin.deleteColumn(TableName,
 byte[])
 As of release 2.0.0.
  (https://issues.apache.org/jira/browse/HBASE-1989;>HBASE-1989).
@@ -415,24 +385,19 @@
  Use Admin.deleteColumnFamily(TableName,
 byte[])}.
 
 
-
+
 org.apache.hadoop.hbase.client.HBaseAdmin.deleteColumn(TableName,
 byte[])
 Since 2.0. Will be removed in 3.0. Use
  HBaseAdmin.deleteColumnFamily(TableName,
 byte[]) instead.
 
 
-
+
 org.apache.hadoop.hbase.KeyValueUtil.ensureKeyValue(Cell)
 without any replacement.
 
 
-
-org.apache.hadoop.hbase.KeyValueUtil.ensureKeyValues(List)
-
 
-org.apache.hadoop.hbase.regionserver.SplitTransaction.execute(Server,
 RegionServerServices)
-use #execute(Server, RegionServerServices, User);  as of 
1.0.2, remove in 3.0
-
+org.apache.hadoop.hbase.KeyValueUtil.ensureKeyValues(List)
 
 
 org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.execute(Server,
 RegionServerServices)
@@ -440,66 +405,56 @@
 
 
 
+org.apache.hadoop.hbase.regionserver.SplitTransaction.execute(Server,
 RegionServerServices)
+use #execute(Server, RegionServerServices, User);  as of 
1.0.2, remove in 3.0
+
+
+
 org.apache.hadoop.hbase.client.HTableWrapper.exists(List)
 Use HTableWrapper.existsAll(java.util.List)
  instead. since 2.0.  remove in 3.0
 
 
-
+
 org.apache.hadoop.hbase.rest.client.RemoteHTable.exists(List)
 
-
+
 org.apache.hadoop.hbase.filter.Filter.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this will be removed in HBase 3.0.0.
  Instead use Filter.filterRowKey(Cell)
 
 
-
+
 org.apache.hadoop.hbase.filter.FilterBase.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this will be removed in HBase 3.0.0.
  Instead use FilterBase.filterRowKey(Cell)
 
 
-
+
 org.apache.hadoop.hbase.client.HTableInterface.flushCommits()
 as of 1.0.0. Replaced by BufferedMutator.flush()
 
 
-
-org.apache.hadoop.hbase.client.HConnection.getAdmin(ServerName)
-internal method, do not use thru HConnection
-
-
 
-org.apache.hadoop.hbase.client.HConnection.getAdmin(ServerName,
 boolean)
-You can pass master flag but nothing special is 
done.
-
-
-
 org.apache.hadoop.hbase.client.Admin.getAlterStatus(byte[])
 Since 2.0.0. Will be removed in 3.0.0. Use Admin.getAlterStatus(TableName)
  instead.
 
 
-
+
 org.apache.hadoop.hbase.security.visibility.VisibilityClient.getAuths(Configuration,
 String)
 Use VisibilityClient.getAuths(Connection,String)
 instead.
 
 
-
+
 org.apache.hadoop.hbase.KeyValue.getBuffer()
 Since 0.98.0.  Use Cell Interface instead.  Do not 
presume single backing buffer.
 
 
-
+
 org.apache.hadoop.hbase.zookeeper.ZKUtil.getChildDataAndWatchForNewChildren(ZooKeeperWatcher,
 String)
 Unused
 
 
-
-org.apache.hadoop.hbase.client.HConnection.getClient(ServerName)
-internal method, do not use thru HConnection
-
-
 
 org.apache.hadoop.hbase.HColumnDescriptor.getCompactionCompression()
 As of release 2.0.0, this will be removed in HBase 3.0.0
@@ -520,16 +475,6 @@
 
 
 
-org.apache.hadoop.hbase.client.HTable.getConnection()
-This method will be changed from public to package 
protected.
-
-
-
-org.apache.hadoop.hbase.client.HConnection.getCurrentNrHRS()
-This method will be changed from public to package 
protected.
-
-
-
 org.apache.hadoop.hbase.ClusterStatus.getDeadServers()
 As of release 2.0.0, this will be removed in HBase 3.0.0
  (https://issues.apache.org/jira/browse/HBASE-13656;>HBASE-13656).
@@ -543,274 +488,170 @@
 
 
 

[17/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 61af1e6..6f271cd 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -599,6 +599,16 @@ service.
 
 
 int
+CellComparator.compare(Cella,
+  Cellb)
+
+
+int
+CellComparator.RowComparator.compare(Cella,
+  Cellb)
+
+
+int
 KeyValue.MetaComparator.compare(Cellleft,
   Cellright)
 Deprecated.
@@ -623,16 +633,6 @@ service.
 
 
 
-int
-CellComparator.compare(Cella,
-  Cellb)
-
-
-int
-CellComparator.RowComparator.compare(Cella,
-  Cellb)
-
-
 private int
 CellComparator.compare(Cella,
   Cellb,
@@ -808,37 +808,37 @@ service.
 
 
 int
-KeyValue.KVComparator.compareRows(Cellleft,
-  Cellright)
-Deprecated.
-
-
-
-int
 CellComparator.compareRows(Cellleft,
   Cellright)
 Compares the rows of the left and right cell.
 
 
-
+
 int
 CellComparator.MetaCellComparator.compareRows(Cellleft,
   Cellright)
 
-
+
 int
-KeyValue.KVComparator.compareTimestamps(Cellleft,
-  Cellright)
+KeyValue.KVComparator.compareRows(Cellleft,
+  Cellright)
 Deprecated.
 
 
-
+
 static int
 CellComparator.compareTimestamps(Cellleft,
   Cellright)
 Compares cell's timestamps in DESCENDING order.
 
 
+
+int
+KeyValue.KVComparator.compareTimestamps(Cellleft,
+  Cellright)
+Deprecated.
+
+
 
 static int
 CellComparator.compareValue(Cellcell,
@@ -1644,17 +1644,17 @@ service.
 
 
 
-Increment
-Increment.add(Cellcell)
-Add the specified KeyValue to this operation.
-
-
-
 Append
 Append.add(Cellcell)
 Add column and value to this Append operation.
 
 
+
+Increment
+Increment.add(Cellcell)
+Add the specified KeyValue to this operation.
+
+
 
 Delete
 Delete.addDeleteMarker(Cellkv)
@@ -1743,20 +1743,12 @@ service.
 booleanpartial)
 
 
-Put
-Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
-
-
 Delete
 Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 
-
-Increment
-Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
-
 
-Append
-Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
+Put
+Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 
 
 Mutation
@@ -1764,6 +1756,14 @@ service.
 Method for setting the put's familyMap
 
 
+
+Append
+Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
+
+
+Increment
+Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
+
 
 
 
@@ -1795,10 +1795,10 @@ service.
 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true;
 title="class or interface in java.math">BigDecimal
-BigDecimalColumnInterpreter.getValue(byte[]colFamily,
+http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true;
 title="class or interface in java.lang">Double
+DoubleColumnInterpreter.getValue(byte[]colFamily,
  

[12/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
index 65fa5a0..7a2b4de 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
@@ -266,11 +266,11 @@ service.
 
 
 protected HRegionLocation
-AbstractRegionServerCallable.location
+RegionAdminServiceCallable.location
 
 
 protected HRegionLocation
-RegionAdminServiceCallable.location
+AbstractRegionServerCallable.location
 
 
 
@@ -298,11 +298,11 @@ service.
 
 
 protected HRegionLocation
-AbstractRegionServerCallable.getLocation()
+MultiServerCallable.getLocation()
 
 
 protected HRegionLocation
-MultiServerCallable.getLocation()
+AbstractRegionServerCallable.getLocation()
 
 
 HRegionLocation
@@ -336,37 +336,11 @@ service.
 
 
 HRegionLocation
-HConnection.getRegionLocation(byte[]tableName,
-  byte[]row,
-  booleanreload)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-HRegionLocation
-ConnectionImplementation.getRegionLocation(byte[]tableName,
-  byte[]row,
-  booleanreload)
-
-
-HRegionLocation
-HConnection.getRegionLocation(TableNametableName,
-  byte[]row,
-  booleanreload)
-Deprecated.
-internal method, do not use thru HConnection
-
-
-
-
-HRegionLocation
 ConnectionImplementation.getRegionLocation(TableNametableName,
   byte[]row,
   booleanreload)
 
-
+
 HRegionLocation
 ClusterConnection.getRegionLocation(TableNametableName,
   byte[]row,
@@ -374,18 +348,10 @@ service.
 Find region location hosting passed row
 
 
-
+
 private HRegionLocation
 AsyncProcess.AsyncRequestFutureImpl.getReplicaLocationOrFail(ActionRowaction)
 
-
-HRegionLocation
-HConnection.locateRegion(byte[]regionName)
-Deprecated.
-internal method, do not use thru HConnection
-
-
-
 
 HRegionLocation
 ConnectionImplementation.locateRegion(byte[]regionName)
@@ -398,33 +364,10 @@ service.
 
 
 HRegionLocation
-HConnection.locateRegion(byte[]tableName,
-byte[]row)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-HRegionLocation
-ConnectionImplementation.locateRegion(byte[]tableName,
-byte[]row)
-
-
-HRegionLocation
-HConnection.locateRegion(TableNametableName,
-byte[]row)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-HRegionLocation
 ConnectionImplementation.locateRegion(TableNametableName,
 byte[]row)
 
-
+
 HRegionLocation
 ClusterConnection.locateRegion(TableNametableName,
 byte[]row)
@@ -432,29 +375,6 @@ service.
  lives in.
 
 
-
-HRegionLocation
-HConnection.relocateRegion(byte[]tableName,
-byte[]row)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-HRegionLocation
-ConnectionImplementation.relocateRegion(byte[]tableName,
-byte[]row)
-
-
-HRegionLocation
-HConnection.relocateRegion(TableNametableName,
-byte[]row)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
 
 HRegionLocation
 ConnectionImplementation.relocateRegion(TableNametableName,
@@ -508,60 +428,14 @@ service.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionLocation
-HConnection.locateRegions(byte[]tableName)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionLocation
-ConnectionImplementation.locateRegions(byte[]tableName)
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionLocation
-HConnection.locateRegions(byte[]tableName,
-  booleanuseCache,
-  booleanofflined)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionLocation
-ConnectionImplementation.locateRegions(byte[]tableName,
-  booleanuseCache,
-  booleanofflined)
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionLocation

[26/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/book.html
--
diff --git a/book.html b/book.html
index 138c0d6..d107f54 100644
--- a/book.html
+++ b/book.html
@@ -28705,32 +28705,44 @@ It provides a nice overview that applies equally to 
the Apache HBase Project.
 150.8.1. Create Patch
 
-The script dev-support/make_patch.sh has been provided to help you 
adhere to patch-creation guidelines.
-The script has the following syntax:
-
-
-
-$ make_patch.sh [-a] [-p patch_dir]
-
+Use dev-support/submit-patch.py to create patches and optionally, 
upload to jira and update
+reviews on Review Board. Patch name is formatted as (JIRA).(branch 
name).(patch number).patch to
+follow Yetus' naming rules. Use -h flag to know detailed usage 
information. Most useful options
+are:
 
 
 
 
-If you do not pass a patch_dir, the script defaults to 
~/patches/.
-If the patch_dir does not exist, it is created.
+-b BRANCH, --branch BRANCH : Specify base branch for 
generating the diff. If not specified, tracking branch is used. If there is no 
tracking branch, error will be thrown.
 
 
-By default, if an existing patch exists with the JIRA ID, the version of 
the new patch is incremented (HBASE--v3.patch). If the 
-aoption is passed, the version is not 
incremented, but the suffix -addendum is added 
(HBASE--v2-addendum.patch). A second addendum to a given version 
is not supported.
-
-
-Detects whether you have more than one local commit on your branch.
-If you do, the script offers you the chance to run git rebase
--i to squash the changes into a single commit so that it can use git 
format-patch.
-If you decline, the script uses git diff instead.
-The patch is saved in a configurable directory and is ready to be attached to 
your JIRA.
+-jid JIRA_ID, --jira-id JIRA_ID : Jira id of the issue. If 
set, we deduce next patch version from attachments in the jira and also upload 
the new patch. Script will ask for jira username/password for authentication. 
If not set, patch is named branch.patch.
 
 
 
+
+The script builds a new patch, and uses REST API to upload it to the jira 
(if --jira-id is
+specified) and update the review on ReviewBoard (if --skip-review-board not 
specified).
+Remote links in the jira are used to figure out if a review request already 
exists. If no review
+request is present, then creates a new one and populates all required fields 
using jira summary,
+patch description, etc. Also adds this reviews link to the jira.
+
+
+
+Authentication
+
+Since attaching patches on JIRA and creating/changing review request on 
ReviewBoard requires a
+logged in user, the script will prompt you for username and password. To avoid 
the hassle every
+time, set up ~/.apache-creds with login details and encrypt it by 
following the steps in footer
+of scripts help message.
+
+Python dependencies
+
+To install required python dependencies, execute
+pip install -r dev-support/python-requirements.txt from the 
master branch.
+
+
+
 
 Patching Workflow
 
@@ -28744,23 +28756,14 @@ If necessary, squash local commits to merge local 
commits into a single one firs
 See this http://stackoverflow.com/questions/5308816/how-to-use-git-merge-squash;>Stack
 Overflow question for more information about squashing commits.
 
 
-The patch should have the JIRA ID in the name.
-If you are generating from a branch, include the target branch in the filename.
-A common naming scheme for patches is:
-
-
-HBASE-.patch
-
-
+Patch name should be as follows to adhere to Yetus' naming convention.
 
 
-HBASE--0.90.patch # to denote that the patch is against branch 
0.90
+(JIRA).(branch name).(patch number).patch
 
 
-
-
-HBASE--v3.patch   # to denote that this is the third version of 
the patch
-
+
+For eg. HBASE-11625.master.001.patch, HBASE-X.branch-1.2.0005.patch, 
etc.
 
 
 
@@ -28779,10 +28782,7 @@ See reviewboard.
 If you need to revise your patch, leave the previous patch file(s) attached 
to the JIRA, and upload the new one, following the naming conventions in submitting.patches.create.
 Cancel the Patch Available flag and then re-trigger it, by toggling the Patch Available button in JIRA.
 JIRA sorts attached files by the time they were attached, and has no problem 
with multiple attachments with the same name.
-However, at times it is easier to refer to different version of a patch if you 
add -vX, where the X is the version 
(starting with 2).
-
-
-If you need to submit your patch against multiple branches, rather than 
just master, name each version of the patch with the branch it is for, 
following the naming conventions in submitting.patches.create.
+However, at times it is easier to increment patch number in the patch name.
 
 
 
@@ -33640,7 +33640,7 @@ The server will return cellblocks compressed using this 
same compressor as long
 
 
 Version 2.0.0-SNAPSHOT
-Last updated 2016-04-11 14:30:43 UTC

[47/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/index-all.html
--
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index 891302b..197a32b 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
@@ -1020,30 +1020,6 @@
 
 cleanupJob(JobContext)
 - Method in class org.apache.hadoop.hbase.mapreduce.TableOutputCommitter
 
-clearCaches(ServerName)
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use thru HConnection
-
-
-clearRegionCache()
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use through HConnection
-
-
-clearRegionCache(TableName)
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use through HConnection
-
-
-clearRegionCache(byte[])
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use through HConnection
-
-
 Client 
- Class in org.apache.hadoop.hbase.rest.client
 
 A wrapper around HttpClient which provides some useful 
function and
@@ -2941,12 +2917,6 @@
 
 delete(ListDelete)
 - Method in class org.apache.hadoop.hbase.rest.client.RemoteHTable
 
-deleteCachedRegionLocation(HRegionLocation)
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use thru HConnection
-
-
 deleteColumn(TableName,
 byte[]) - Method in interface org.apache.hadoop.hbase.client.Admin
 
 Deprecated.
@@ -4431,23 +4401,6 @@
 
 Retrieve an Admin implementation to administer an HBase 
cluster.
 
-getAdmin()
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-Retrieve an Admin implementation to administer an HBase 
cluster.
-
-getAdmin(ServerName)
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use thru HConnection
-
-
-getAdmin(ServerName,
 boolean) - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-You can pass master flag but nothing special is 
done.
-
-
 getAdmin()
 - Method in class org.apache.hadoop.hbase.mapreduce.TableInputFormatBase
 
 Allows subclasses to get the Admin.
@@ -4601,12 +4554,6 @@
 
 getCipherProvider(Configuration)
 - Static method in class org.apache.hadoop.hbase.io.crypto.Encryption
 
-getClient(ServerName)
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use thru HConnection
-
-
 getClientAckTime()
 - Method in class org.apache.hadoop.hbase.ProcedureInfo
 
 getClientPort()
 - Method in class org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster
@@ -4732,10 +4679,6 @@
 
 getConfiguration()
 - Method in interface org.apache.hadoop.hbase.client.Connection
 
-getConfiguration()
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-
 getConfiguration()
 - Method in interface org.apache.hadoop.hbase.client.Table
 
 Returns the Configuration object used by this 
instance.
@@ -4800,12 +4743,6 @@
 
 Returns the current key.
 
-getCurrentNrHRS()
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-This method will be changed from public to package 
protected.
-
-
 getCurrentValue()
 - Method in class org.apache.hadoop.hbase.mapreduce.TableRecordReader
 
 Returns the current value.
@@ -5081,30 +5018,6 @@
 
 getHostPort()
 - Method in class org.apache.hadoop.hbase.ServerName
 
-getHTableDescriptor(TableName)
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use through HConnection
-
-
-getHTableDescriptor(byte[])
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use through HConnection
-
-
-getHTableDescriptors(ListString)
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-since 0.96.0
-
-
-getHTableDescriptorsByTableName(ListTableName)
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-Use Admin.getTableDescriptor(TableName)
 instead.
-
-
 getHTableMultiplexerStatus()
 - Method in class org.apache.hadoop.hbase.client.HTableMultiplexer
 
 getHttpClient()
 - Method in class org.apache.hadoop.hbase.rest.client.Client
@@ -5153,12 +5066,6 @@
 
 Get the expected length for the initialization vector
 
-getKeepAliveMasterService()
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-Since 0.96.0
-
-
 getKeepDeletedCells()
 - Method in class org.apache.hadoop.hbase.HColumnDescriptor
 
 getKey()
 - Method in class org.apache.hadoop.hbase.io.crypto.Context
@@ -5283,12 +5190,6 @@
 
 Map of families to all versions of its qualifiers and 
values.
 
-getMaster()
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use thru HConnection
-
-
 getMaster()
 - Method in 

[46/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
index f97ce1b..a88dd52 100644
--- a/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
@@ -101,7 +101,7 @@
 
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class HColumnDescriptor
+public class HColumnDescriptor
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableHColumnDescriptor
 An HColumnDescriptor contains information about a column 
family such as the
@@ -810,7 +810,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 COMPRESSION
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String COMPRESSION
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String COMPRESSION
 See Also:Constant
 Field Values
 
 
@@ -820,7 +820,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 COMPRESSION_COMPACT
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String COMPRESSION_COMPACT
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String COMPRESSION_COMPACT
 See Also:Constant
 Field Values
 
 
@@ -830,7 +830,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 ENCODE_ON_DISK
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String ENCODE_ON_DISK
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String ENCODE_ON_DISK
 See Also:Constant
 Field Values
 
 
@@ -840,7 +840,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 DATA_BLOCK_ENCODING
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String DATA_BLOCK_ENCODING
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String DATA_BLOCK_ENCODING
 See Also:Constant
 Field Values
 
 
@@ -850,7 +850,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 BLOCKCACHE
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String BLOCKCACHE
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String BLOCKCACHE
 Key for the BLOCKCACHE attribute.
  A more exact name would be CACHE_DATA_ON_READ because this flag sets whether 
or not we
  cache DATA blocks.  We always cache INDEX and BLOOM blocks; caching these 
blocks cannot be
@@ -864,7 +864,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 CACHE_DATA_ON_WRITE
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CACHE_DATA_ON_WRITE
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CACHE_DATA_ON_WRITE
 See Also:Constant
 Field Values
 
 
@@ -874,7 +874,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 CACHE_INDEX_ON_WRITE
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CACHE_INDEX_ON_WRITE
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CACHE_INDEX_ON_WRITE
 See Also:Constant
 Field Values
 
 
@@ -884,7 +884,7 @@ implements http://docs.oracle.com/javase/7/docs/api/java/lang/Comparabl
 
 
 CACHE_BLOOMS_ON_WRITE
-public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String CACHE_BLOOMS_ON_WRITE
+public static finalhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String 

[20/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/Abortable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/Abortable.html 
b/devapidocs/org/apache/hadoop/hbase/Abortable.html
index 107b93e..ff220a0 100644
--- a/devapidocs/org/apache/hadoop/hbase/Abortable.html
+++ b/devapidocs/org/apache/hadoop/hbase/Abortable.html
@@ -87,7 +87,7 @@
 
 
 All Known Subinterfaces:
-Admin, ClusterConnection, Connection, HConnection, MasterServices, OnlineRegions, RegionServerServices, Server
+Admin, ClusterConnection, Connection, MasterServices, OnlineRegions, RegionServerServices, Server
 
 
 
 All Known Implementing Classes:



[15/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
index 4d1be00..be59e21 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
@@ -555,13 +555,13 @@ service.
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx,
+MasterObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx,
   TableNametableName,
   HColumnDescriptorcolumnFamily)
 Deprecated.
 As of release 2.0.0, this will be removed in HBase 3.0.0
  (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645).
- Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+ Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
 
 
 
@@ -575,21 +575,23 @@ service.
 
 
 void
-MasterObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx,
+BaseMasterObserver.postAddColumn(ObserverContextMasterCoprocessorEnvironmentctx,
   TableNametableName,
   HColumnDescriptorcolumnFamily)
 Deprecated.
 As of release 2.0.0, this will be removed in HBase 3.0.0
  (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645).
- Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+ Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
 
 
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx,
+MasterObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx,
   TableNametableName,
-  HColumnDescriptorcolumnFamily)
+  HColumnDescriptorcolumnFamily)
+Called after the new column family has been created.
+
 
 
 void
@@ -599,21 +601,19 @@ service.
 
 
 void
-MasterObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx,
+BaseMasterObserver.postAddColumnFamily(ObserverContextMasterCoprocessorEnvironmentctx,
   TableNametableName,
-  HColumnDescriptorcolumnFamily)
-Called after the new column family has been created.
-
+  HColumnDescriptorcolumnFamily)
 
 
 void
-BaseMasterObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx,
+MasterObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx,
 TableNametableName,
 HColumnDescriptorcolumnFamily)
 Deprecated.
 As of release 2.0.0, this will be removed in HBase 3.0.0
  (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). Use
- BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContext,
 TableName, HColumnDescriptor).
+ MasterObserver.postCompletedAddColumnFamilyAction(ObserverContext,
 TableName, HColumnDescriptor).
 
 
 
@@ -627,21 +627,23 @@ service.
 
 
 void
-MasterObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx,
+BaseMasterObserver.postAddColumnHandler(ObserverContextMasterCoprocessorEnvironmentctx,
 TableNametableName,
 HColumnDescriptorcolumnFamily)
 Deprecated.
 As of release 2.0.0, this will be removed in HBase 3.0.0
  (https://issues.apache.org/jira/browse/HBASE-13645;>HBASE-13645). Use
- MasterObserver.postCompletedAddColumnFamilyAction(ObserverContext,
 TableName, HColumnDescriptor).
+ BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContext,
 TableName, HColumnDescriptor).
 
 
 
 
 void
-BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContextMasterCoprocessorEnvironmentctx,
+MasterObserver.postCompletedAddColumnFamilyAction(ObserverContextMasterCoprocessorEnvironmentctx,
 TableNametableName,
-HColumnDescriptorcolumnFamily)
+HColumnDescriptorcolumnFamily)
+Called after the new column family has been created.
+
 
 
 void
@@ -651,17 +653,17 @@ service.
 
 
 void
-MasterObserver.postCompletedAddColumnFamilyAction(ObserverContextMasterCoprocessorEnvironmentctx,
+BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContextMasterCoprocessorEnvironmentctx,
   

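
A minimal illustrative sketch (not part of the published site change above) of the MasterObserver.postAddColumnFamily(...) hook that the class-use listing describes as "Called after the new column family has been created"; the observer class name and the log output are assumptions.

import java.io.IOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;

public class LoggingMasterObserver extends BaseMasterObserver {
  @Override
  public void postAddColumnFamily(ObserverContext<MasterCoprocessorEnvironment> ctx,
      TableName tableName, HColumnDescriptor columnFamily) throws IOException {
    // Runs on the master after the new column family has been created.
    System.out.println("Added family " + columnFamily.getNameAsString()
        + " to table " + tableName.getNameAsString());
  }
}
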
[24/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 40d3741..9d52840 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 2007 - 2016 The Apache Software Foundation
 
-  File: 1772,
- Errors: 11639,
+  File: 1771,
+ Errors: 11536,
  Warnings: 0,
  Infos: 0
   
@@ -130,7 +130,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.LogRoller.java;>org/apache/hadoop/hbase/regionserver/LogRoller.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.RowTooBigException.java;>org/apache/hadoop/hbase/regionserver/RowTooBigException.java
 
 
   0
@@ -139,12 +139,12 @@ under the License.
   0
 
 
-  5
+  0
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.RowTooBigException.java;>org/apache/hadoop/hbase/regionserver/RowTooBigException.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.LogRoller.java;>org/apache/hadoop/hbase/regionserver/LogRoller.java
 
 
   0
@@ -153,7 +153,7 @@ under the License.
   0
 
 
-  0
+  5
 
   
   
@@ -354,7 +354,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.MultiAction.java;>org/apache/hadoop/hbase/client/MultiAction.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.ResultStatsUtil.java;>org/apache/hadoop/hbase/client/ResultStatsUtil.java
 
 
   0
@@ -363,12 +363,12 @@ under the License.
   0
 
 
-  3
+  0
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.ResultStatsUtil.java;>org/apache/hadoop/hbase/client/ResultStatsUtil.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.client.MultiAction.java;>org/apache/hadoop/hbase/client/MultiAction.java
 
 
   0
@@ -377,7 +377,7 @@ under the License.
   0
 
 
-  0
+  3
 
   
   
@@ -452,7 +452,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.HealthCheckChore.java;>org/apache/hadoop/hbase/HealthCheckChore.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.io.crypto.Encryption.java;>org/apache/hadoop/hbase/io/crypto/Encryption.java
 
 
   0
@@ -461,12 +461,12 @@ under the License.
   0
 
 
-  0
+  53
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.io.crypto.Encryption.java;>org/apache/hadoop/hbase/io/crypto/Encryption.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.HealthCheckChore.java;>org/apache/hadoop/hbase/HealthCheckChore.java
 
 
   0
@@ -475,7 +475,7 @@ under the License.
   0
 
 
-  53
+  0
 
   
   
@@ -494,7 +494,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.io.ByteArrayOutputStream.java;>org/apache/hadoop/hbase/io/ByteArrayOutputStream.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.security.visibility.VisibilityConstants.java;>org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java
 
 
   0
@@ -503,12 +503,12 @@ under the License.
   0
 
 

[40/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.html 
b/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.html
index cada150..17b52b0 100644
--- a/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.html
+++ b/apidocs/org/apache/hadoop/hbase/client/HTableMultiplexer.html
@@ -35,7 +35,7 @@
 
 
 
-Prev 
Class
+Prev 
Class
 Next 
Class
 
 
@@ -486,7 +486,7 @@ publicboolean
 
-Prev 
Class
+Prev 
Class
 Next 
Class
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Admin.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
index 72528d4..41ea0af 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Admin.html
@@ -105,13 +105,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Admin
-HConnection.getAdmin()
-Deprecated.
-Retrieve an Admin implementation to administer an HBase 
cluster.
-
-
-
-Admin
 Connection.getAdmin()
 Retrieve an Admin implementation to administer an HBase 
cluster.
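
A hedged sketch (not part of this commit) of obtaining an Admin through Connection.getAdmin(), the replacement for the removed HConnection.getAdmin(); the table name is an assumption used only for illustration.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class AdminFromConnection {
  public static void main(String[] args) throws Exception {
    // Both Connection and Admin are Closeable, so try-with-resources cleans up.
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = conn.getAdmin()) {
      System.out.println("t1 exists: " + admin.tableExists(TableName.valueOf("t1")));
    }
  }
}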
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Connection.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
index 6fac339..8f3fa15 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Connection.html
@@ -103,23 +103,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Uses of Connection in org.apache.hadoop.hbase.client
-
-Subinterfaces of Connection in org.apache.hadoop.hbase.client
-
-Modifier and Type
-Interface and Description
-
-
-
-interface
-HConnection
-Deprecated.
-in favor of Connection 
and ConnectionFactory
-
-
-
-
-
 
 Methods in org.apache.hadoop.hbase.client
 that return Connection
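
A minimal sketch of the Connection/ConnectionFactory pattern that the removed HConnection page deferred to; the table name, row key, and printed field are assumptions.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class ConnectionFactoryExample {
  public static void main(String[] args) throws Exception {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Table table = conn.getTable(TableName.valueOf("t1"))) {
      // Single-row read through the unmanaged Connection; replaces HConnection usage.
      Result r = table.get(new Get(Bytes.toBytes("row1")));
      System.out.println("row1 empty: " + r.isEmpty());
    }
  }
}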
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
index f347eea..edbc8bd 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
@@ -138,19 +138,19 @@ the order they are declared.
 
 
 
-Get
-Get.setConsistency(Consistencyconsistency)
-
-
 Scan
 Scan.setConsistency(Consistencyconsistency)
 
-
+
 Query
 Query.setConsistency(Consistencyconsistency)
 Sets the consistency level for this operation
 
 
+
+Get
+Get.setConsistency(Consistencyconsistency)
+
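
A small sketch of the Query.setConsistency(...) call listed above; the Table handle and row key are assumed to already exist.

import java.io.IOException;
import org.apache.hadoop.hbase.client.Consistency;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;

class TimelineReadExample {
  static Result timelineGet(Table table, byte[] row) throws IOException {
    Get get = new Get(row);
    // TIMELINE allows the read to be served by region replicas, possibly stale.
    get.setConsistency(Consistency.TIMELINE);
    return table.get(get);
  }
}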
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
index e648e80..a225a9c 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
@@ -199,15 +199,15 @@ the order they are declared.
 Append.setDurability(Durabilityd)
 
 
+Increment
+Increment.setDurability(Durabilityd)
+
+
 Mutation
 Mutation.setDurability(Durabilityd)
 Set the durability for this mutation
 
 
-
-Increment
-Increment.setDurability(Durabilityd)
-
 
 Delete
 Delete.setDurability(Durabilityd)
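
A small sketch of Mutation.setDurability(...) as listed above; the column family and qualifier names are assumptions.

import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

class DurabilityExample {
  static Put asyncWalPut(byte[] row) {
    Put put = new Put(row);
    put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
    // ASYNC_WAL defers the WAL sync, trading a little durability for latency.
    put.setDurability(Durability.ASYNC_WAL);
    return put;
  }
}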

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/client/class-use/HConnection.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/HConnection.html 
b/apidocs/org/apache/hadoop/hbase/client/class-use/HConnection.html
deleted file mode 100644
index 5ec019c..000
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/HConnection.html
+++ /dev/null
@@ -1,115 +0,0 @@
-Uses of Interface org.apache.hadoop.hbase.client.HConnection (Apache HBase 2.0.0-SNAPSHOT API)

[02/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html
index 24b6038..1cde784 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/ConnectionImplementation.html
@@ -95,7 +95,7 @@
 
 
 All Implemented Interfaces:
-Closeable, AutoCloseable, Abortable, ClusterConnection, Connection, HConnection
+Closeable, AutoCloseable, Abortable, ClusterConnection, Connection
 
 
 Direct Known Subclasses:
@@ -104,7 +104,7 @@
 
 
 @InterfaceAudience.Private
- class ConnectionImplementation
+ class ConnectionImplementation
 extends Object
 implements ClusterConnection, Closeable
 Main implementation of Connection and ClusterConnection interfaces.
@@ -332,11 +332,11 @@ implements 
-
+
 
 
-Fields inherited from interfaceorg.apache.hadoop.hbase.client.HConnection
-HBASE_CLIENT_CONNECTION_IMPL
+Fields inherited from interfaceorg.apache.hadoop.hbase.client.ClusterConnection
+HBASE_CLIENT_CONNECTION_IMPL
 
 
 
@@ -413,70 +413,59 @@ implements 
 void
-clearRegionCache(byte[]tableName)
-
-
-void
 clearRegionCache(TableNametableName)
 Allows flushing the region cache of all locations that 
pertain to
  tableName
 
 
-
+
 void
 clearRegionCache(TableNametableName,
 byte[]row)
 
-
+
 void
 close()
 
-
+
 private void
 closeMaster()
 Immediate close of the shared master.
 
 
-
+
 private void
 closeMasterService(ConnectionImplementation.MasterServiceStatemss)
 
-
+
 private void
 closeZooKeeperWatcher()
 
-
+
 protected AsyncProcess
 createAsyncProcess(org.apache.hadoop.conf.Configurationconf)
 
-
+
 void
 deleteCachedRegionLocation(HRegionLocationlocation)
 Deletes cached locations for the specific region.
 
 
-
+
 protected void
 finalize()
 Close the connection for good.
 
 
-
+
 Admin
 getAdmin()
 Retrieve an Admin implementation to administer an HBase 
cluster.
 
 
-
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-getAdmin(ServerNameserverName)
-Establishes a connection to the region server at the 
specified address.
-
-
 
 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-getAdmin(ServerNameserverName,
-booleanmaster)
+getAdmin(ServerNameserverName)
 Establishes a connection to the region server at the 
specified address.
 
 
@@ -544,42 +533,6 @@ implements getCurrentNrHRS()
 
 
-HTableDescriptor
-getHTableDescriptor(byte[]tableName)
-Deprecated.
-Use Admin.getTableDescriptor(org.apache.hadoop.hbase.TableName)
-  instead
-
-
-
-
-HTableDescriptor
-getHTableDescriptor(TableNametableName)
-Deprecated.
-Use Admin.getTableDescriptor(org.apache.hadoop.hbase.TableName)
-  instead
-
-
-
-
-HTableDescriptor[]
-getHTableDescriptors(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringnames)
-Deprecated.
-Use
-  Admin.getTableDescriptorsByTableName(java.util.List)
-  instead
-
-
-
-
-HTableDescriptor[]
-getHTableDescriptorsByTableName(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableNametableNames)
-Deprecated.
-Use Admin.getTableDescriptorsByTableName(java.util.List)
 instead
-
-
-
-
 MasterKeepAliveConnection
 getKeepAliveMasterService()
 This function allows HBaseAdmin and potentially others to 
get a shared MasterService
@@ -617,28 +570,6 @@ implements getNumberOfCachedRegionLocations(TableNametableName)
 
 
-boolean
-getRegionCachePrefetch(byte[]tableName)
-Deprecated.
-always return false since 0.99
-
-
-
-
-boolean
-getRegionCachePrefetch(TableNametableName)
-Deprecated.
-always return false since 0.99
-
-
-
-
-HRegionLocation
-getRegionLocation(byte[]tableName,
-  byte[]row,
-  

[23/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/coc.html
--
diff --git a/coc.html b/coc.html
index 2fa9642..1928243 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Code of Conduct Policy
@@ -331,7 +331,7 @@ For flagrant violations requiring a firm response the PMC 
may opt to skip early
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-05-27
+  Last Published: 
2016-05-31
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 3f8973c..0e1b8b0 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -673,7 +673,7 @@ Now your HBase server is running, start 
coding and build that next
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-05-27
+  Last Published: 
2016-05-31
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index 3dabff2..a204238 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependencies
 
@@ -518,7 +518,7 @@
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-05-27
+  Last Published: 
2016-05-31
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 73058a5..2892109 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Reactor Dependency Convergence
 
@@ -1703,7 +1703,7 @@
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-05-27
+  Last Published: 
2016-05-31
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index eaca8aa..76a6310 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Dependency Information
 
@@ -312,7 +312,7 @@
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-05-27
+  Last Published: 
2016-05-31
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index 04eae3e..f6b8fe5 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependency Management
 
@@ -798,7 +798,7 @@
 http://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2016-05-27
+  Last Published: 
2016-05-31
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/allclasses-frame.html
--
diff --git a/devapidocs/allclasses-frame.html b/devapidocs/allclasses-frame.html
index b83d13c..4cb8057 100644
--- a/devapidocs/allclasses-frame.html
+++ b/devapidocs/allclasses-frame.html
@@ -792,7 +792,6 @@
 HBaseSaslRpcServer.SaslGssCallbackHandler
 HBaseSnapshotException
 HColumnDescriptor
-HConnection
 HConstants
 HConstants.Modify
 HConstants.OperationStatusCode

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/allclasses-noframe.html
--
diff --git 

[08/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 137d413..bd0dec9 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -248,11 +248,11 @@
 
 
 ServerName
-SplitLogTask.getServerName()
+Server.getServerName()
 
 
 ServerName
-Server.getServerName()
+SplitLogTask.getServerName()
 
 
 static ServerName
@@ -698,16 +698,16 @@
 
 
 
-void
-MetaCache.cacheLocation(TableNametableName,
+private void
+ConnectionImplementation.cacheLocation(TableNametableName,
   ServerNamesource,
   HRegionLocationlocation)
 Put a newly discovered HRegionLocation into the cache.
 
 
 
-private void
-ConnectionImplementation.cacheLocation(TableNametableName,
+void
+MetaCache.cacheLocation(TableNametableName,
   ServerNamesource,
   HRegionLocationlocation)
 Put a newly discovered HRegionLocation into the cache.
@@ -736,67 +736,59 @@
 
 
 void
-HConnection.clearCaches(ServerNamesn)
-Deprecated.
-internal method, do not use thru HConnection
-
-
-
-
-void
 ConnectionImplementation.clearCaches(ServerNameserverName)
 
-
+
 void
 ClusterConnection.clearCaches(ServerNamesn)
 Clear any caches that pertain to server name 
sn.
 
 
-
+
 void
 Admin.closeRegion(ServerNamesn,
   HRegionInfohri)
 Close a region.
 
 
-
+
 void
 HBaseAdmin.closeRegion(ServerNamesn,
   HRegionInfohri)
 
-
+
 private void
 HBaseAdmin.compact(ServerNamesn,
   HRegionInfohri,
   booleanmajor,
   byte[]family)
 
-
+
 void
 Admin.compactRegionServer(ServerNamesn,
   booleanmajor)
 Compact all regions on the region server
 
 
-
+
 void
 HBaseAdmin.compactRegionServer(ServerNamesn,
   booleanmajor)
 Compact all regions on the region server
 
 
-
+
 CoprocessorRpcChannel
 Admin.coprocessorService(ServerNamesn)
 Creates and returns a RpcChannel instance
  connected to the passed region server.
 
 
-
+
 CoprocessorRpcChannel
 HBaseAdmin.coprocessorService(ServerNamesn)
 
-
+
 protected MultiServerCallableRow
 AsyncProcess.createCallable(ServerNameserver,
 TableNametableName,
@@ -804,7 +796,7 @@
 Create a callable.
 
 
-
+
 private http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 AsyncProcess.AsyncRequestFutureImpl.createLog(intnumAttempt,
   intfailureCount,
@@ -817,7 +809,7 @@
   intfailed,
   intstopped)
 
-
+
 static ClusterConnection
 ConnectionUtils.createShortCircuitConnection(org.apache.hadoop.conf.Configurationconf,
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ExecutorServicepool,
@@ -829,21 +821,13 @@
  deserialization, networking, etc..) when talking to a local server.
 
 
-
+
 protected void
 AsyncProcess.decTaskCounters(http://docs.oracle.com/javase/7/docs/api/java/util/Collection.html?is-external=true;
 title="class or interface in 
java.util">Collectionbyte[]regions,
   ServerNamesn)
 Decrements the counters for a given region and the region 
server.
 
 
-
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-HConnection.getAdmin(ServerNameserverName)
-Deprecated.
-internal method, do not use thru HConnection
-
-
-
 
 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
 ConnectionImplementation.getAdmin(ServerNameserverName)
@@ -855,106 +839,76 @@
 
 
 
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-HConnection.getAdmin(ServerNameserverName,
-booleangetMaster)
-Deprecated.
-You can pass master flag but nothing special is 
done.
-
-
-
-
-org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService.BlockingInterface
-ConnectionImplementation.getAdmin(ServerNameserverName,
-booleanmaster)
-
-
 private http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">Long
 AsyncProcess.AsyncRequestFutureImpl.getBackoff(ServerNameserver,
 byte[]regionName)
 
 
 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService.BlockingInterface
-HConnection.getClient(ServerNameserverName)
-Deprecated.
-internal method, do not use thru HConnection
-
-
-
-

[21/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 4247278..bb5d035 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -3701,8 +3701,6 @@
 
 AuthUtil() 
- Constructor for class org.apache.hadoop.hbase.AuthUtil
 
-autoFlush
 - Variable in class org.apache.hadoop.hbase.client.HTable
-
 avail 
- Variable in class org.apache.hadoop.hbase.quotas.RateLimiter
 
 avail
 - Static variable in class org.apache.hadoop.hbase.util.UnsafeAvailChecker
@@ -9260,12 +9258,6 @@
 
 clearCaches(ServerName)
 - Method in class org.apache.hadoop.hbase.client.ConnectionImplementation
 
-clearCaches(ServerName)
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use thru HConnection
-
-
 clearChildZNodes()
 - Method in class org.apache.hadoop.hbase.procedure.ZKProcedureUtil
 
 clearChunks()
 - Method in class org.apache.hadoop.hbase.regionserver.MemStoreChunkPool
@@ -9329,26 +9321,6 @@
 
 clearRegionCache(TableName)
 - Method in class org.apache.hadoop.hbase.client.ConnectionImplementation
 
-clearRegionCache(byte[])
 - Method in class org.apache.hadoop.hbase.client.ConnectionImplementation
-
-clearRegionCache()
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use through HConnection
-
-
-clearRegionCache(TableName)
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use through HConnection
-
-
-clearRegionCache(byte[])
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use through HConnection
-
-
 clearRegionCache()
 - Method in class org.apache.hadoop.hbase.client.HTable
 
 Explicitly clears the region cache to fetch the latest 
value from META.
@@ -10306,7 +10278,7 @@
 Contacts a region server and waits up to timeout ms
  to close the region.
 
-closeRegionSilentlyAndWait(HConnection,
 ServerName, HRegionInfo) - Static method in class 
org.apache.hadoop.hbase.util.HBaseFsckRepair
+closeRegionSilentlyAndWait(Connection,
 ServerName, HRegionInfo) - Static method in class 
org.apache.hadoop.hbase.util.HBaseFsckRepair
 
 Contacts a region server and waits up to 
hbase.hbck.close.timeout ms
  (default 120s) to close the region.
@@ -13387,7 +13359,7 @@
 
 State of the MasterService connection/setup.
 
-ConnectionImplementation.MasterServiceState(HConnection)
 - Constructor for class org.apache.hadoop.hbase.client.ConnectionImplementation.MasterServiceState
+ConnectionImplementation.MasterServiceState(Connection)
 - Constructor for class org.apache.hadoop.hbase.client.ConnectionImplementation.MasterServiceState
 
 ConnectionImplementation.MasterServiceStubMaker - 
Class in org.apache.hadoop.hbase.client
 
@@ -13433,6 +13405,10 @@
 
 connections
 - Variable in class org.apache.hadoop.hbase.util.ConnectionCache
 
+connections
 - Variable in class org.apache.hadoop.hbase.util.MultiHConnection
+
+connectionsLock
 - Variable in class org.apache.hadoop.hbase.util.MultiHConnection
+
 connectionTimeout
 - Variable in class org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster
 
 ConnectionUtils - Class in org.apache.hadoop.hbase.client
@@ -15113,7 +15089,7 @@
 
 createClusterConnection()
 - Method in class org.apache.hadoop.hbase.regionserver.HRegionServer
 
-Create a 'smarter' HConnection, one that is capable of 
by-passing RPC if the request is to
+Create a 'smarter' Connection, one that is capable of 
by-passing RPC if the request is to
  the local server.
 
 createCompaction()
 - Method in class org.apache.hadoop.hbase.regionserver.DateTieredStoreEngine
@@ -18982,12 +18958,6 @@
 
 deleteCachedRegionLocation(HRegionLocation)
 - Method in class org.apache.hadoop.hbase.client.ConnectionImplementation
 
-deleteCachedRegionLocation(HRegionLocation)
 - Method in interface org.apache.hadoop.hbase.client.HConnection
-
-Deprecated.
-internal method, do not use thru HConnection
-
-
 deleteCellVisTags
 - Variable in class org.apache.hadoop.hbase.security.visibility.VisibilityController.DeleteVersionVisibilityExpressionFilter
 
 deleteCellVisTagsFormat
 - Variable in class org.apache.hadoop.hbase.security.visibility.VisibilityController.DeleteVersionVisibilityExpressionFilter
@@ -20190,6 +20160,8 @@
 
 Execute a list of Put/Delete mutations.
 
+doBatchWithCallback(List?
 extends Row, Object[], Batch.CallbackR, ClusterConnection, 
ExecutorService, TableName) - Static method in class 
org.apache.hadoop.hbase.client.HTable
+
 doBulkLoad(Path,
 Admin, Table, RegionLocator) - Method in class 
org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles
 
 Perform a bulk load of the given directory into the given
@@ -24987,7 +24959,7 @@
 
 Puts the specified HRegionInfo into META with replica 
related columns
 
-fixMultiAssignment(HConnection,

[39/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 24328e2..87c63f2 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -106,78 +106,78 @@
 MultipleColumnPrefixFilter.filterColumn(Cellcell)
 
 
-abstract Filter.ReturnCode
-Filter.filterKeyValue(Cellv)
-A way to filter based on the column family, column 
qualifier and/or the column value.
-
+Filter.ReturnCode
+MultiRowRangeFilter.filterKeyValue(Cellignored)
 
 
 Filter.ReturnCode
-ColumnPrefixFilter.filterKeyValue(Cellcell)
+DependentColumnFilter.filterKeyValue(Cellc)
 
 
 Filter.ReturnCode
-WhileMatchFilter.filterKeyValue(Cellv)
+RandomRowFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-PrefixFilter.filterKeyValue(Cellv)
+ColumnPrefixFilter.filterKeyValue(Cellcell)
 
 
 Filter.ReturnCode
-ColumnCountGetFilter.filterKeyValue(Cellv)
+SkipFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-FirstKeyOnlyFilter.filterKeyValue(Cellv)
+InclusiveStopFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-InclusiveStopFilter.filterKeyValue(Cellv)
+WhileMatchFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-MultipleColumnPrefixFilter.filterKeyValue(Cellkv)
+SingleColumnValueFilter.filterKeyValue(Cellc)
 
 
 Filter.ReturnCode
-FilterList.filterKeyValue(Cellc)
+QualifierFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-ValueFilter.filterKeyValue(Cellv)
+FuzzyRowFilter.filterKeyValue(Cellc)
 
 
 Filter.ReturnCode
-ColumnPaginationFilter.filterKeyValue(Cellv)
+KeyOnlyFilter.filterKeyValue(Cellignored)
 
 
 Filter.ReturnCode
-KeyOnlyFilter.filterKeyValue(Cellignored)
+PrefixFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-DependentColumnFilter.filterKeyValue(Cellc)
+MultipleColumnPrefixFilter.filterKeyValue(Cellkv)
 
 
 Filter.ReturnCode
-FuzzyRowFilter.filterKeyValue(Cellc)
+FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cellv)
+Deprecated.
+
 
 
 Filter.ReturnCode
-SkipFilter.filterKeyValue(Cellv)
+PageFilter.filterKeyValue(Cellignored)
 
 
 Filter.ReturnCode
-ColumnRangeFilter.filterKeyValue(Cellkv)
+ColumnCountGetFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-TimestampsFilter.filterKeyValue(Cellv)
+FamilyFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-SingleColumnValueFilter.filterKeyValue(Cellc)
+TimestampsFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
@@ -185,29 +185,29 @@
 
 
 Filter.ReturnCode
-RandomRowFilter.filterKeyValue(Cellv)
+FilterList.filterKeyValue(Cellc)
 
 
 Filter.ReturnCode
-QualifierFilter.filterKeyValue(Cellv)
+FirstKeyOnlyFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-FamilyFilter.filterKeyValue(Cellv)
+ColumnPaginationFilter.filterKeyValue(Cellv)
 
 
 Filter.ReturnCode
-MultiRowRangeFilter.filterKeyValue(Cellignored)
+ColumnRangeFilter.filterKeyValue(Cellkv)
 
 
 Filter.ReturnCode
-FirstKeyValueMatchingQualifiersFilter.filterKeyValue(Cellv)
-Deprecated.
-
+ValueFilter.filterKeyValue(Cellv)
 
 
-Filter.ReturnCode
-PageFilter.filterKeyValue(Cellignored)
+abstract Filter.ReturnCode
+Filter.filterKeyValue(Cellv)
+A way to filter based on the column family, column 
qualifier and/or the column value.
+
 
 
 static Filter.ReturnCode
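
A rough sketch (not from this commit) of the Filter.filterKeyValue(Cell) contract shown above: include cells whose value equals "keep", otherwise skip to the next row. A filter actually shipped to region servers would also need toByteArray()/parseFrom() for serialization.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.util.Bytes;

public class KeepValueFilter extends FilterBase {
  private static final byte[] KEEP = Bytes.toBytes("keep");

  @Override
  public ReturnCode filterKeyValue(Cell cell) {
    // INCLUDE keeps the cell; NEXT_ROW abandons the rest of the current row.
    return Bytes.equals(CellUtil.cloneValue(cell), KEEP)
        ? ReturnCode.INCLUDE
        : ReturnCode.NEXT_ROW;
  }
}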

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index af620c0..02c305c 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -148,19 +148,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Get
-Get.setFilter(Filterfilter)
-
-
 Scan
 Scan.setFilter(Filterfilter)
 
-
+
 Query
 Query.setFilter(Filterfilter)
 Apply the specified server-side filter when performing the 
Query.
 
 
+
+Get
+Get.setFilter(Filterfilter)
+
 
 
 
@@ -382,83 +382,83 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
+DependentColumnFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true;
 title="class or interface in 
java.util">ArrayListbyte[]filterArguments)
 
 
 static Filter
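
A short sketch of Scan.setFilter(...) / Query.setFilter(...) from the listing above, using the stock PrefixFilter; the row-key prefix and the Table handle are assumptions.

import java.io.IOException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

class ScanWithFilterExample {
  static void scanUserRows(Table table) throws IOException {
    Scan scan = new Scan();
    // The filter is evaluated server-side, so non-matching rows never cross the wire.
    scan.setFilter(new PrefixFilter(Bytes.toBytes("user-")));
    try (ResultScanner scanner = table.getScanner(scan)) {
      for (Result r : scanner) {
        System.out.println(Bytes.toString(r.getRow()));
      }
    }
  }
}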

[11/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
index c3a4c4d..6b0d2ca 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
@@ -587,74 +587,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HTableDescriptor
-HConnection.getHTableDescriptor(byte[]tableName)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-HTableDescriptor
-ConnectionImplementation.getHTableDescriptor(byte[]tableName)
-Deprecated.
-Use Admin.getTableDescriptor(org.apache.hadoop.hbase.TableName)
-  instead
-
-
-
-
-HTableDescriptor
-HConnection.getHTableDescriptor(TableNametableName)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-HTableDescriptor
-ConnectionImplementation.getHTableDescriptor(TableNametableName)
-Deprecated.
-Use Admin.getTableDescriptor(org.apache.hadoop.hbase.TableName)
-  instead
-
-
-
-
-HTableDescriptor[]
-HConnection.getHTableDescriptors(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtableNames)
-Deprecated.
-since 0.96.0
-
-
-
-
-HTableDescriptor[]
-ConnectionImplementation.getHTableDescriptors(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringnames)
-Deprecated.
-Use
-  Admin.getTableDescriptorsByTableName(java.util.List)
-  instead
-
-
-
-
-HTableDescriptor[]
-HConnection.getHTableDescriptorsByTableName(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableNametableNames)
-Deprecated.
-Use Admin.getTableDescriptor(TableName)
 instead.
-
-
-
-
-HTableDescriptor[]
-ConnectionImplementation.getHTableDescriptorsByTableName(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableNametableNames)
-Deprecated.
-Use Admin.getTableDescriptorsByTableName(java.util.List)
 instead
-
-
-
-
-HTableDescriptor
 Table.getTableDescriptor()
 Gets the table descriptor for 
this table.
 
@@ -689,8 +621,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 (package private) static HTableDescriptor
-HBaseAdmin.getTableDescriptor(TableNametableName,
-HConnectionconnection,
+HBaseAdmin.getTableDescriptor(TableNametableName,
+Connectionconnection,
 RpcRetryingCallerFactoryrpcCallerFactory,
 RpcControllerFactoryrpcControllerFactory,
 intoperationTimeout,
@@ -740,24 +672,8 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HTableDescriptor[]
-HConnection.listTables()
-Deprecated.
-Use Admin.listTables()
 instead.
-
-
-
-
-HTableDescriptor[]
 HBaseAdmin.listTables()
 
-
-HTableDescriptor[]
-ConnectionImplementation.listTables()
-Deprecated.
-Use Admin.listTables()
 instead
-
-
-
 
 HTableDescriptor[]
 Admin.listTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
@@ -1088,9 +1004,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-BaseMasterObserver.postCloneSnapshot(ObserverContextMasterCoprocessorEnvironmentctx,
+MasterObserver.postCloneSnapshot(ObserverContextMasterCoprocessorEnvironmentctx,
   
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionsnapshot,
-  HTableDescriptorhTableDescriptor)
+  HTableDescriptorhTableDescriptor)
+Called after a snapshot clone operation has been 
requested.
+
 
 
 void
@@ -1100,17 +1018,17 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-MasterObserver.postCloneSnapshot(ObserverContextMasterCoprocessorEnvironmentctx,
+BaseMasterObserver.postCloneSnapshot(ObserverContextMasterCoprocessorEnvironmentctx,
   
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionsnapshot,
-  HTableDescriptorhTableDescriptor)
-Called after a snapshot clone operation has been 
requested.
-
+  

[16/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
index ac565dc..e146e63 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
@@ -234,16 +234,16 @@
 
 
 int
+BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparatorcomparator,
+Cellkey)
+
+
+int
 DataBlockEncoder.EncodedSeeker.compareKey(CellComparatorcomparator,
 Cellkey)
 Compare the given key against the current key
 
 
-
-int
-BufferedDataBlockEncoder.BufferedEncodedSeeker.compareKey(CellComparatorcomparator,
-Cellkey)
-
 
 DataBlockEncoder.EncodedSeeker
 CopyKeyDataBlockEncoder.createSeeker(CellComparatorcomparator,
@@ -299,30 +299,30 @@
 
 
 protected CellComparator
-CompoundBloomFilterBase.comparator
-Comparator used to compare Bloom filter keys
+HFileWriterImpl.comparator
+Key comparator.
 
 
 
 protected CellComparator
-HFileWriterImpl.comparator
-Key comparator.
-
+HFile.WriterFactory.comparator
 
 
 private CellComparator
-HFileReaderImpl.comparator
-Key comparator
+HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
+Needed doing lookup on blocks.
 
 
 
 protected CellComparator
-HFile.WriterFactory.comparator
+CompoundBloomFilterBase.comparator
+Comparator used to compare Bloom filter keys
+
 
 
 private CellComparator
-HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
-Needed doing lookup on blocks.
+HFileReaderImpl.comparator
+Key comparator
 
 
 
@@ -344,11 +344,11 @@
 
 
 CellComparator
-HFileReaderImpl.getComparator()
+HFile.Reader.getComparator()
 
 
 CellComparator
-HFile.Reader.getComparator()
+HFileReaderImpl.getComparator()
 
 
 
@@ -503,41 +503,41 @@
 StoreFileWriter.Builder.comparator
 
 
-protected CellComparator
-StripeStoreFlusher.StripeFlushRequest.comparator
-
-
 private CellComparator
-Segment.comparator
-
-
-protected CellComparator
-HRegion.RegionScannerImpl.comparator
+AbstractMemStore.comparator
 
 
-private CellComparator
-ScanInfo.comparator
-
-
 protected CellComparator
 StripeMultiFileWriter.comparator
 
+
+private CellComparator
+Segment.comparator
+
 
 private CellComparator
-AbstractMemStore.comparator
+ScanInfo.comparator
 
 
 private CellComparator
 HStore.comparator
 
 
-private CellComparator
-DefaultStoreFileManager.kvComparator
+protected CellComparator
+HRegion.RegionScannerImpl.comparator
 
 
 protected CellComparator
+StripeStoreFlusher.StripeFlushRequest.comparator
+
+
+protected CellComparator
 KeyValueHeap.KVScannerComparator.kvComparator
 
+
+private CellComparator
+DefaultStoreFileManager.kvComparator
+
 
 private CellComparator
 ScanQueryMatcher.rowComparator
@@ -565,11 +565,11 @@
 
 
 CellComparator
-Store.getComparator()
+StoreFileReader.getComparator()
 
 
-(package private) CellComparator
-StoreFileScanner.getComparator()
+protected CellComparator
+AbstractMemStore.getComparator()
 
 
 protected CellComparator
@@ -582,20 +582,20 @@
 ScanInfo.getComparator()
 
 
-protected CellComparator
-AbstractMemStore.getComparator()
+CellComparator
+Store.getComparator()
 
 
 CellComparator
-StoreFileReader.getComparator()
+KeyValueHeap.KVScannerComparator.getComparator()
 
 
-CellComparator
-HStore.getComparator()
+(package private) CellComparator
+StoreFileScanner.getComparator()
 
 
 CellComparator
-KeyValueHeap.KVScannerComparator.getComparator()
+HStore.getComparator()
 
 
 
@@ -629,6 +629,12 @@
 
 
 
+protected void
+DefaultStoreEngine.createComponents(org.apache.hadoop.conf.Configurationconf,
+Storestore,
+CellComparatorkvComparator)
+
+
 protected abstract void
 StoreEngine.createComponents(org.apache.hadoop.conf.Configurationconf,
 Storestore,
@@ -636,23 +642,17 @@
 Create the StoreEngine's components.
 
 
-
-protected void
-StripeStoreEngine.createComponents(org.apache.hadoop.conf.Configurationconf,
-Storestore,
-CellComparatorcomparator)
-
 
 protected void
-DefaultStoreEngine.createComponents(org.apache.hadoop.conf.Configurationconf,
+DateTieredStoreEngine.createComponents(org.apache.hadoop.conf.Configurationconf,
 Storestore,
 CellComparatorkvComparator)
 
 
 protected void
-DateTieredStoreEngine.createComponents(org.apache.hadoop.conf.Configurationconf,
+StripeStoreEngine.createComponents(org.apache.hadoop.conf.Configurationconf,
 Storestore,
-CellComparatorkvComparator)
+CellComparatorcomparator)
 
 
 private 

[33/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
index a38b7e1..ff52bcc 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
@@ -1032,13 +1032,13 @@
 1024   * by different set of handlers. For example, HIGH_QOS tagged methods are
 1025   * handled by high priority handlers.
 1026   */
-1027  // normal_QOS < QOS_threshold < replication_QOS < replay_QOS < admin_QOS < high_QOS
+1027  // normal_QOS < replication_QOS < replay_QOS < QOS_threshold < admin_QOS < high_QOS
 1028  public static final int NORMAL_QOS = 0;
-1029  public static final int QOS_THRESHOLD = 10;
-1030  public static final int HIGH_QOS = 200;
-1031  public static final int REPLICATION_QOS = 5;
-1032  public static final int REPLAY_QOS = 6;
-1033  public static final int ADMIN_QOS = 100;
+1029  public static final int REPLICATION_QOS = 5;
+1030  public static final int REPLAY_QOS = 6;
+1031  public static final int QOS_THRESHOLD = 10;
+1032  public static final int ADMIN_QOS = 100;
+1033  public static final int HIGH_QOS = 200;
 1034  public static final int SYSTEMTABLE_QOS = HIGH_QOS;
 1035
 1036  /** Directory under /hbase where archived hfiles are stored */

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/src-html/org/apache/hadoop/hbase/LocalHBaseCluster.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/LocalHBaseCluster.html 
b/apidocs/src-html/org/apache/hadoop/hbase/LocalHBaseCluster.html
index 6f1524f..528a7e6 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/LocalHBaseCluster.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/LocalHBaseCluster.html
@@ -183,7 +183,7 @@
 175  Configuration config, final int 
index)
 176  throws IOException {
 177// Create each regionserver with its 
own Configuration instance so each has
-178// its HConnection instance rather 
than share (see HBASE_INSTANCES down in
+178// its Connection instance rather 
than share (see HBASE_INSTANCES down in
 179// the guts of ConnectionManager).
 180
 181// Also, create separate 
CoordinatedStateManager instance per Server.
@@ -218,7 +218,7 @@
 210  public JVMClusterUtil.MasterThread 
addMaster(Configuration c, final int index)
 211  throws IOException {
 212// Create each master with its own 
Configuration instance so each has
-213// its HConnection instance rather 
than share (see HBASE_INSTANCES down in
+213// its Connection instance rather 
than share (see HBASE_INSTANCES down in
 214// the guts of ConnectionManager.
 215
 216// Also, create separate 
CoordinatedStateManager instance per Server.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html 
b/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html
index 723f4d1..4c2a3b3 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/ServerName.html
@@ -33,392 +33,393 @@
 025import java.io.Serializable;
 026import java.util.ArrayList;
 027import java.util.List;
-028import java.util.regex.Pattern;
-029
-030import org.apache.hadoop.hbase.classification.InterfaceAudience;
-031import org.apache.hadoop.hbase.classification.InterfaceStability;
-032import org.apache.hadoop.hbase.exceptions.DeserializationException;
-033import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
-034import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
-035import org.apache.hadoop.hbase.util.Addressing;
-036import org.apache.hadoop.hbase.util.Bytes;
-037
-038/**
-039 * Instance of an HBase ServerName.
-040 * A server name is used uniquely identifying a server instance in a cluster and is made
-041 * of the combination of hostname, port, and startcode.  The startcode distingushes restarted
-042 * servers on same hostname and port (startcode is usually timestamp of server startup). The
-043 * {@link #toString()} format of ServerName is safe to use in the filesystem and as znode name
-044 * up in ZooKeeper.  Its format is:
-045 * <code>&lt;hostname&gt; '{@link #SERVERNAME_SEPARATOR}' &lt;port&gt;
-046 * '{@link #SERVERNAME_SEPARATOR}' &lt;startcode&gt;</code>.
-047 * For example, if hostname is <code>www.example.org</code>, port is <code>1234</code>,
-048 * and the startcode for the regionserver is <code>1212121212</code>, then
-049 * the {@link #toString()} would be <code>www.example.org,1234,1212121212</code>.
-050 *
-051 * <p>You can obtain a versioned serialized form
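
An illustrative sketch, reusing the values from the javadoc above, of the hostname,port,startcode form that ServerName.toString() produces.

import org.apache.hadoop.hbase.ServerName;

public class ServerNameExample {
  public static void main(String[] args) {
    ServerName sn = ServerName.valueOf("www.example.org", 1234, 1212121212L);
    System.out.println(sn);               // www.example.org,1234,1212121212
    System.out.println(sn.getHostname()); // www.example.org
    System.out.println(sn.getPort());     // 1234
  }
}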

[05/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
index 65d17de..74fe4e6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
@@ -100,14 +100,14 @@
 
 
 void
-HMaster.checkTableModifiable(TableNametableName)
-
-
-void
 MasterServices.checkTableModifiable(TableNametableName)
 Check table is modifiable; i.e.
 
 
+
+void
+HMaster.checkTableModifiable(TableNametableName)
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
index 0698107..74ccea7 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
@@ -163,14 +163,14 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-HMaster.checkTableModifiable(TableNametableName)
-
-
-void
 MasterServices.checkTableModifiable(TableNametableName)
 Check table is modifiable; i.e.
 
 
+
+void
+HMaster.checkTableModifiable(TableNametableName)
+
 
 
 
@@ -186,13 +186,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-EnableTableHandler
-EnableTableHandler.prepare()
-
-
 DisableTableHandler
 DisableTableHandler.prepare()
 
+
+EnableTableHandler
+EnableTableHandler.prepare()
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
 
b/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
index dd7e0ba..f485a70 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/ZooKeeperConnectionException.html
@@ -131,7 +131,7 @@
 
 
 
-HFileArchiveManager(HConnectionconnection,
+HFileArchiveManager(Connectionconnection,
   
org.apache.hadoop.conf.Configurationconf)
 
 
@@ -160,21 +160,13 @@
 
 
 boolean
-HConnection.isMasterRunning()
-Deprecated.
-internal method, do not use thru HConnection
-
-
-
-
-boolean
 ConnectionImplementation.isMasterRunning()
 Deprecated.
 this has been deprecated without a replacement
 
 
 
-
+
 boolean
 ClusterConnection.isMasterRunning()
 Deprecated.
@@ -182,7 +174,7 @@
 
 
 
-
+
 void
 HBaseAdmin.unassign(byte[]regionName,
 booleanforce)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
index ec8892f..8534b8f 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Private.html
@@ -7555,7 +7555,7 @@ service.
 class
 ThriftHBaseServiceHandler
 This class is a glue object that connects Thrift RPC calls 
to the HBase client API primarily
- defined in the HTableInterface.
+ defined in the Table interface.
 
 
 
@@ -8041,8 +8041,8 @@ service.
 
 class
 MultiHConnection
-Provides ability to create multiple HConnection instances 
and allows to process a batch of
- actions using HConnection.processBatchCallback()
+Provides ability to create multiple Connection instances 
and allows to process a batch of
+ actions using HTable.doBatchWithCallback()
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html
 
b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html
index 77859f0..5cdca62 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceAudience.Public.html
+++ 

[43/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html 
b/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
index 88596bc..d0e4ac9 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
@@ -386,38 +386,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HTableDescriptor
-HConnection.getHTableDescriptor(byte[]tableName)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-HTableDescriptor
-HConnection.getHTableDescriptor(TableNametableName)
-Deprecated.
-internal method, do not use through HConnection
-
-
-
-
-HTableDescriptor[]
-HConnection.getHTableDescriptors(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtableNames)
-Deprecated.
-since 0.96.0
-
-
-
-
-HTableDescriptor[]
-HConnection.getHTableDescriptorsByTableName(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableNametableNames)
-Deprecated.
-Use Admin.getTableDescriptor(TableName)
 instead.
-
-
-
-
-HTableDescriptor
 Table.getTableDescriptor()
 Gets the table descriptor for 
this table.
 
@@ -454,32 +422,24 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HTableDescriptor[]
-HConnection.listTables()
-Deprecated.
-Use Admin.listTables()
 instead.
-
-
-
-
-HTableDescriptor[]
 Admin.listTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern)
 List all the userspace tables matching the given 
pattern.
 
 
-
+
 HTableDescriptor[]
 Admin.listTables(http://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html?is-external=true;
 title="class or interface in java.util.regex">Patternpattern,
 booleanincludeSysTables)
 List all the tables matching the given pattern.
 
 
-
+
 HTableDescriptor[]
 Admin.listTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex)
 List all the userspace tables matching the given regular 
expression.
 
 
-
+
 HTableDescriptor[]
 Admin.listTables(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringregex,
 booleanincludeSysTables)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html 
b/apidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
index 9b24adf..433c2ee 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/MasterNotRunningException.html
@@ -64,61 +64,7 @@
 
 Uses of 
Classorg.apache.hadoop.hbase.MasterNotRunningException
 
-
-
-
-
-Packages that use MasterNotRunningException
-
-Package
-Description
-
-
-
-org.apache.hadoop.hbase.client
-
-Provides HBase Client
-
-
-
-
-
-
-
-
-
-
-Uses of MasterNotRunningException in org.apache.hadoop.hbase.client
-
-Methods in org.apache.hadoop.hbase.client
 that throw MasterNotRunningException
-
-Modifier and Type
-Method and Description
-
-
-
-org.apache.hadoop.hbase.client.MasterKeepAliveConnection
-HConnection.getKeepAliveMasterService()
-Deprecated.
-Since 0.96.0
-
-
-
-
-boolean
-HConnection.isMasterRunning()
-Deprecated.
-internal method, do not use thru HConnection
-
-
-
-
-
-
-
-
-
-
+No usage of 
org.apache.hadoop.hbase.MasterNotRunningException
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 8413058..a424e0d 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/ServerName.html
[generated Javadoc diff, condensed: the row for the deprecated HConnection.clearCaches(ServerName) is removed; the rows for Admin.closeRegion(ServerName, HRegionInfo) and Admin.compactRegionServer(ServerName, ...) are renumbered. The hunk is truncated in this digest.]

[07/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
index 294d686..615907d 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
[generated Javadoc diff, condensed: the HMaster.getTableDescriptors() and MasterServices.getTableDescriptors() rows swap positions; nothing else changes.]
 
 
 



[35/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/overview-tree.html
--
diff --git a/apidocs/overview-tree.html b/apidocs/overview-tree.html
index 4397d73..0192f92 100644
--- a/apidocs/overview-tree.html
+++ b/apidocs/overview-tree.html
[generated Javadoc diff, condensed: in the interface hierarchy, the deprecated org.apache.hadoop.hbase.client.HConnection child entries listed under Connection are removed (three occurrences), leaving Connection (extends Abortable, Closeable) on its own; the enum listing at the bottom of the tree (CompactType, SnapshotType, Durability, CompactionState, ThrottleType, BloomType and the filter enums) is re-emitted in a different order.]
 
 
 



[49/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index eeccebf..4ec93b9 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
[PDF diff, condensed: /CreationDate and /ModDate change from D:20160527144303 to D:20160531144155, /Outlines moves from object 3990 to 3989 and /PageLabels from 4194 to 4193; the regenerated /Kids page-object reference list follows and is omitted here.]

[18/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HConstants.html 
b/devapidocs/org/apache/hadoop/hbase/HConstants.html
index 59d7c7a..7fee815 100644
--- a/devapidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/org/apache/hadoop/hbase/HConstants.html
[generated Javadoc diff, condensed: the constant-detail entries for QOS_THRESHOLD, HIGH_QOS, REPLICATION_QOS, REPLAY_QOS and ADMIN_QOS in HConstants are re-emitted in a different order; the declarations themselves (public static final int, with Constant Field Values links) are unchanged.]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html 
b/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
index a0339eb..41fb732 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
[generated Javadoc diff, condensed: only the generated source links for KeepDeletedCells.values() and KeepDeletedCells.valueOf(String) change; the documented behaviour (iterate the constants with for (KeepDeletedCells c : KeepDeletedCells.values()), exact-name lookup for valueOf) is unchanged.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html 
b/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html
index 521f7b6..c31803c 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.Type.html
[generated Javadoc diff, condensed: only the generated source links for KeyValue.Type.values() and KeyValue.Type.valueOf(String) change; the method documentation text is identical.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/ProcedureState.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ProcedureState.html 
b/devapidocs/org/apache/hadoop/hbase/ProcedureState.html
index fb4dce0..bc58668 100644
--- a/devapidocs/org/apache/hadoop/hbase/ProcedureState.html
+++ b/devapidocs/org/apache/hadoop/hbase/ProcedureState.html
[generated Javadoc diff, condensed: only the generated source link for ProcedureState.values() changes; the documentation text is identical. The hunk is truncated in this digest.]

[25/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index f2d9292..a1b2ea9 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
[generated Checkstyle report diff, condensed: the report's generated-date header is updated; the file count drops from 1772 to 1771 and the total error count from 11616 to 11536. Rows for the removed org/apache/hadoop/hbase/client/ClusterConnection.java and HConnection.java disappear, rows for org/apache/hadoop/hbase/ipc/AsyncRpcChannelImpl.java and RpcClientImpl.java are added, and per-file error counts shift for many files (for example HBaseAdmin.java 56 -> 54, HTable.java 12 -> 8, WALEditsReplaySink.java 12 -> 4, HBaseFsck.java 88 -> 86, HColumnDescriptor.java 26 -> 27); the rest of the hunks only renumber rows.]
 

[10/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
index 38c22d9..1689c47 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
[generated Javadoc diff, condensed: the ZKInterProcessLockBase.handler and ZKInterProcessReadWriteLock.handler field rows swap positions; nothing else changes.]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
index 4f4468b..e9a6b35 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeepDeletedCells.html
[generated Javadoc diff, condensed: the ScanQueryMatcher.keepDeletedCells ("whether to return deleted rows") and ScanInfo.keepDeletedCells field rows swap positions; nothing else changes.]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html
index 6cbd252..025a9fd 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.KeyOnlyKeyValue.html
[generated Javadoc diff, condensed: the StoreFileReader.lastBloomKeyOnlyKV and StoreFileWriter.lastBloomKeyOnlyKV field rows swap positions; nothing else changes.]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
index 350221f..344c681 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
[generated Javadoc diff, condensed: the paired KeyValue.create(...) / KeyValueUtil.create(...) and createKeyValueFromKey(...) rows swap positions throughout the methods table; no signatures or descriptions change. The hunk is truncated in this digest.]

[32/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/src-html/org/apache/hadoop/hbase/client/Connection.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Connection.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Connection.html
index 250c3b9..9d0e8d9 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Connection.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Connection.html
[source listing diff, condensed: the rendered source of org.apache.hadoop.hbase.client.Connection is renumbered; the removed side of the hunk still carries the sentence "This class replaces HConnection, which is now deprecated" (the added side is truncated in this digest). The interface documents getConfiguration(); getTable(TableName) and getTable(TableName, ExecutorService), which are lightweight, not thread-safe, and must be closed by the caller; getBufferedMutator(TableName) and getBufferedMutator(BufferedMutatorParams), which return a thread-safe BufferedMutator that the caller must close; and getRegionLocator for inspecting region placement.]
[01/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 4f67e099a -> 7fb45f8e4


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
index 1c142c0..7fe5682 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/ConnectionUtils.MasterlessConnection.html
[generated Javadoc diff, condensed: ConnectionUtils.MasterlessConnection ("Some tests shut down the master. But table availability is a master RPC which is performed on region re-lookups.") no longer lists the deprecated HConnection among its implemented interfaces; the inherited HBASE_CLIENT_CONNECTION_IMPL field is now attributed to ClusterConnection instead of HConnection; and the "Methods inherited from ConnectionImplementation" list is re-emitted without the HConnection-era entries (getHTableDescriptors, getKeepAliveMasterService, getTableNames, processBatch, setRegionCachePrefetch and similar). The replacement list is truncated in this digest.]
 

[37/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
index 289e2d6..5532fb3 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
[generated Javadoc diff, condensed: in the Order class-use page, the RawBytes.order and OrderedBytesBase.order field rows swap, and the getOrder() rows for DataType, the Raw* types, Struct, Union3/Union4, FixedLengthWrapper, TerminatedWrapper, OrderedBytesBase and PBType are re-emitted in a different order. DataType.getOrder() keeps its description ("Retrieve the sort Order imposed by this data type, or null when natural ordering is not preserved").]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html
index 268b819..eb232f1 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/Pair.html
[generated Javadoc diff, condensed: the TableInputFormatBase.getStartEndKeys() and TableInputFormat.getStartEndKeys() rows swap positions; nothing else changes.]
 
 
 



[50/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/acid-semantics.html
--
diff --git a/acid-semantics.html b/acid-semantics.html
index 48e412b..ae40c89 100644
--- a/acid-semantics.html
+++ b/acid-semantics.html
[generated page diff, condensed: the date meta tag is regenerated and the footer changes from "Last Published: 2016-05-27" to "Last Published: 2016-05-31"; the page content is otherwise unchanged.]
 
 
 



[27/52] [partial] hbase-site git commit: Published site at 75c23605430266da0f30eef04b97ebd4b30c60b8.

2016-05-31 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/db523e4d/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html
index 59a1891..1904857 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/RegionMover.html
[source listing diff, condensed: the rendered source of org.apache.hadoop.hbase.util.RegionMover is renumbered (the hunk grows from 972 to 973 lines). The listing shows the imports and the class Javadoc: RegionMover is a tool for loading/unloading regions to/from a given region server, runnable directly from the command line. Ack mode confirms that each region is online after the move; NoAck mode is best effort and faster, useful before a region server shutdown because the master will reassign any stuck region anyway. It can also be used programmatically by constructing an object through RegionMoverBuilder and calling load() or unload(). Visible constants include hbase.move.retries.max (default 5), hbase.move.wait.max (default 60) and hbase.serverstart.wait.max (default 180); the builder exposes filename(String), excludeFile(String), maxthreads(int), ack(boolean) and timeout(int), plus a hostname parameter that may be "hostname" or "hostname:port". The listing is truncated in this digest.]
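Read together, the listing above suggests a programmatic use of the region mover roughly like the sketch below. The builder constructor argument, the timeout unit, and the return/exception signatures of load()/unload() are assumptions, not taken from this diff.

import org.apache.hadoop.hbase.util.RegionMover;

public class RegionMoverSketch {
  public static void main(String[] args) throws Exception {
    // Build a mover for one region server (hostname or hostname:port), per the
    // builder methods listed above; ack mode waits until moved regions are online.
    RegionMover mover = new RegionMover.RegionMoverBuilder("rs-host.example.com:16020") // hypothetical server
        .ack(true)       // acknowledge that each region is online after the move
        .maxthreads(4)   // move up to four regions in parallel
        .timeout(300)    // overall timeout (assumed to be seconds)
        .build();
    mover.unload();      // drain regions off the server, e.g. before a restart
    mover.load();        // later, move the saved regions back
  }
}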

hbase git commit: HBASE-15851 Makefile update for build env

2016-05-31 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 f49f262f3 -> 5b10031a1


HBASE-15851 Makefile update for build env

1) Makefile to compile protobuf sources which are extracted in build
2) Added -O2 and -D_GLIBCXX_USE_CXX11_ABI=0 compilation flags
3) Header files added in Makefile


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5b10031a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5b10031a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5b10031a

Branch: refs/heads/HBASE-14850
Commit: 5b10031a1b1821a4403f63db1728e9f380af14f5
Parents: f49f262
Author: sudeeps 
Authored: Sun May 22 09:38:47 2016 +1000
Committer: Elliott Clark 
Committed: Fri May 27 11:32:43 2016 -0700

--
 hbase-native-client/Makefile  | 132 +++--
 hbase-native-client/Makefile.protos   |  53 
 hbase-native-client/core/meta-utils.h |   2 +-
 3 files changed, 159 insertions(+), 28 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5b10031a/hbase-native-client/Makefile
--
diff --git a/hbase-native-client/Makefile b/hbase-native-client/Makefile
index 826233f..7e68b6a 100644
--- a/hbase-native-client/Makefile
+++ b/hbase-native-client/Makefile
@@ -1,37 +1,115 @@
-##
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-build:
-   $(shell buck build core/... )
+#use "gcc" to compile source files
+CC:=g++
+LD:=g++
+ 
+DEBUG_PATH = build/debug
+RELEASE_PATH = build/release
+PROTO_SRC_DIR = build/if
+MODULES = connection core serde test-util utils
+SRC_DIR = $(MODULES)
+DEBUG_BUILD_DIR = $(addprefix $(DEBUG_PATH)/,$(MODULES))
+RELEASE_BUILD_DIR = $(addprefix $(RELEASE_PATH)/,$(MODULES))
+INCLUDE_DIR = . build/
 
-check:
-   $(shell buck test --all --no-results-cache )
+#flags to pass to the CPP compiler & linker
+CPPFLAGS_DEBUG = -D_GLIBCXX_USE_CXX11_ABI=0 -g -Wall -std=c++14 -pedantic -fPIC
+CPPFLAGS_RELEASE = -D_GLIBCXX_USE_CXX11_ABI=0 -DNDEBUG -O2 -Wall -std=c++14 
-pedantic -fPIC
+LDFLAGS = -lprotobuf -lzookeeper_mt -lsasl2 -lfolly -lwangle
+LINKFLAG = -shared
 
-doc:
-   $(shell doxygen hbase.doxygen > /dev/null )
+#define list of source files and object files
+SRC = $(foreach sdir,$(SRC_DIR),$(wildcard $(sdir)/*.cc))
+PROTOSRC = $(patsubst %.proto, $(addprefix build/,%.pb.cc),$(wildcard 
if/*.proto))
+DEPS =  $(foreach sdir,$(SRC_DIR),$(wildcard $(sdir)/*.h))
+PROTODEPS = $(patsubst %.proto, $(addprefix build/,%.pb.h),$(wildcard 
if/*.proto))
+DEBUG_OBJ = $(patsubst %.cc,$(DEBUG_PATH)/%.o,$(SRC))
+DEBUG_OBJ += $(patsubst %.cc,$(DEBUG_PATH)/%.o,$(PROTOSRC))
+RELEASE_OBJ = $(patsubst %.cc,$(RELEASE_PATH)/%.o,$(SRC))
+INCLUDES = $(addprefix -I,$(INCLUDE_DIR))
+   
+LIB_DIR = /usr/local
+LIB_LIBDIR = $(LIB_DIR)/lib
+LIB_INCDIR = $(LIB_DIR)/include
+LIB_RELEASE=$(RELEASE_PATH)/libHbaseClient.so
+ARC_RELEASE=$(RELEASE_PATH)/libHbaseClient.a
+LIB_DEBUG=$(DEBUG_PATH)/libHbaseClient_d.so
+ARC_DEBUG=$(DEBUG_PATH)/libHbaseClient_d.a
+
+vpath %.cc $(SRC_DIR)
+
+$(LIB_DEBUG): $(DEBUG_BUILD_DIR)
+define make-goal-dbg
+$1/%.o: %.cc $(DEPS) $(PROTODEPS) $(PROTOSRC)
+   $(CC) -c $$< -o $$@ $(CPPFLAGS_DEBUG) $(INCLUDES)
+endef
+
+$(LIB_RELEASE): $(RELEASE_BUILD_DIR)
+define make-goal-rel
+$1/%.o: %.cc $(DEPS) $(PROTODEPS) $(PROTOSRC)
+   $(CC) -c $$< -o $$@ $(CPPFLAGS_RELEASE) $(INCLUDES) 
+endef
+
+.PHONY: all clean install 
+
+build: checkdirs protos $(LIB_DEBUG) $(LIB_RELEASE) $(ARC_DEBUG) 
$(ARC_RELEASE) 
+
+checkdirs: $(DEBUG_BUILD_DIR) $(RELEASE_BUILD_DIR) $(PROTO_SRC_DIR)
+
+protos: createprotosrc
+   @make all -f Makefile.protos
+
+createprotosrc:$(PROTO_SRC_DIR)
+   @protoc --proto_path=if --cpp_out=$(PROTO_SRC_DIR) if/*.proto
+
+install:
+   cp $(LIB_RELEASE) $(LIB_LIBDIR)/libHbaseClient.so
+   cp $(ARC_RELEASE) $(LIB_LIBDIR)/libHbaseClient.a
+   cp $(LIB_DEBUG) $(LIB_LIBDIR)/libHbaseClient_d.so
+   cp $(ARC_DEBUG) $(LIB_LIBDIR)/libHbaseClient_d.a
+   

hbase git commit: HBASE-15917 Addendum. Fix bug in report-flakies.py where hanging tests are not being added to flaky list. (Apekshit) ADDENDUM #2!

2016-05-31 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master c80e23264 -> eb64cd9dd


HBASE-15917 Addendum. Fix bug in report-flakies.py where hanging tests are not 
being added to flaky list. (Apekshit)
ADDENDUM #2!

Change-Id: I9c55932d0f9e65b72ec8d3ae714144536b2bfe0a

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eb64cd9d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eb64cd9d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eb64cd9d

Branch: refs/heads/master
Commit: eb64cd9dd13ba297539c409989c63e800cb378a1
Parents: c80e232
Author: Apekshit 
Authored: Tue May 31 02:29:40 2016 -0700
Committer: stack 
Committed: Tue May 31 10:16:40 2016 -0700

--
 dev-support/report-flakies.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eb64cd9d/dev-support/report-flakies.py
--
diff --git a/dev-support/report-flakies.py b/dev-support/report-flakies.py
index bdc88dc..c0d16c7 100755
--- a/dev-support/report-flakies.py
+++ b/dev-support/report-flakies.py
@@ -161,7 +161,7 @@ for url_max_build in expanded_urls:
 print ""
 
 
-all_bad_tests = all_timeout_tests.union(all_failed_tests)
+all_bad_tests = all_hanging_tests.union(all_failed_tests)
 if args.mvn:
 includes = ",".join(all_bad_tests)
 with open("./includes", "w") as inc_file:



hbase git commit: Remove the hbasecon banner logo

2016-05-31 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 5ea2f0923 -> c80e23264


Remove the hbasecon banner logo


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c80e2326
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c80e2326
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c80e2326

Branch: refs/heads/master
Commit: c80e23264204c651ec7bd3e0fbf294ac728359e6
Parents: 5ea2f09
Author: stack 
Authored: Tue May 31 10:14:40 2016 -0700
Committer: stack 
Committed: Tue May 31 10:14:40 2016 -0700

--
 src/main/site/site.xml | 5 -
 1 file changed, 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c80e2326/src/main/site/site.xml
--
diff --git a/src/main/site/site.xml b/src/main/site/site.xml
index 6d4de53..fb237bb 100644
--- a/src/main/site/site.xml
+++ b/src/main/site/site.xml
[site descriptor diff, condensed: the hbasecon2016 banner entry (images/hbasecon2016-stacked.png, linking to http://hbasecon.com/) is removed from src/main/site/site.xml; the Apache HBase entry that follows it is kept.]



hbase git commit: HBASE-15919 Modify docs to change from @Rule to @ClassRule. Also clarify that timeout limits are on test case level. (Apekshit)

2016-05-31 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 75c236054 -> 5ea2f0923


HBASE-15919 Modify docs to change from @Rule to @ClassRule. Also clarify that 
timeout limits are on test case level. (Apekshit)

Change-Id: Ifcd0264ea147bcb1100db74d92da95b643f4793f

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5ea2f092
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5ea2f092
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5ea2f092

Branch: refs/heads/master
Commit: 5ea2f092332515eea48136d7d92f7b8ea72df15b
Parents: 75c2360
Author: Apekshit 
Authored: Tue May 31 03:30:50 2016 -0700
Committer: stack 
Committed: Tue May 31 10:12:00 2016 -0700

--
 src/main/asciidoc/_chapters/developer.adoc | 93 +
 1 file changed, 33 insertions(+), 60 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5ea2f092/src/main/asciidoc/_chapters/developer.adoc
--
diff --git a/src/main/asciidoc/_chapters/developer.adoc 
b/src/main/asciidoc/_chapters/developer.adoc
index a11a04e..74ce3df 100644
--- a/src/main/asciidoc/_chapters/developer.adoc
+++ b/src/main/asciidoc/_chapters/developer.adoc
@@ -864,7 +864,8 @@ Also, keep in mind that if you are running tests in the 
`hbase-server` module yo
 [[hbase.unittests]]
 === Unit Tests
 
-Apache HBase unit tests are subdivided into four categories: small, medium, 
large, and integration with corresponding JUnit 
link:http://www.junit.org/node/581[categories]: `SmallTests`, `MediumTests`, 
`LargeTests`, `IntegrationTests`.
+Apache HBase test cases are subdivided into four categories: small, medium, 
large, and
+integration with corresponding JUnit 
link:http://www.junit.org/node/581[categories]: `SmallTests`, `MediumTests`, 
`LargeTests`, `IntegrationTests`.
 JUnit categories are denoted using java annotations and look like this in your 
unit test code.
 
 [source,java]
@@ -879,10 +880,11 @@ public class TestHRegionInfo {
 }
 
 
-The above example shows how to mark a unit test as belonging to the `small` 
category.
-All unit tests in HBase have a categorization.
+The above example shows how to mark a test case as belonging to the `small` 
category.
+All test cases in HBase should have a categorization.
 
-The first three categories, `small`, `medium`, and `large`, are for tests run 
when you type `$ mvn test`.
+The first three categories, `small`, `medium`, and `large`, are for test cases 
which run when you
+type `$ mvn test`.
 In other words, these three categorizations are for HBase unit tests.
 The `integration` category is not for unit tests, but for integration tests.
 These are run when you invoke `$ mvn verify`.
@@ -890,22 +892,23 @@ Integration tests are described in 
<>.
 
 HBase uses a patched maven surefire plugin and maven profiles to implement its 
unit test characterizations.
 
-Keep reading to figure which annotation of the set small, medium, and large to 
put on your new HBase unit test.
+Keep reading to figure which annotation of the set small, medium, and large to 
put on your new
+HBase test case.
 
 .Categorizing Tests
 Small Tests (((SmallTests)))::
-  _Small_ tests are executed in a shared JVM.
-  We put in this category all the tests that can be executed quickly in a 
shared JVM.
-  The maximum execution time for a small test is 15 seconds, and small tests 
should not use a (mini)cluster.
+  _Small_ test cases are executed in a shared JVM and individual test cases 
should run in 15 seconds
+   or less; i.e. a link:https://en.wikipedia.org/wiki/JUnit[junit test 
fixture], a java object made
+   up of test methods, should finish in under 15 seconds. These test cases can 
not use mini cluster.
+   These are run as part of patch pre-commit.
 
 Medium Tests (((MediumTests)))::
-  _Medium_ tests represent tests that must be executed before proposing a 
patch.
-  They are designed to run in less than 30 minutes altogether, and are quite 
stable in their results.
-  They are designed to last less than 50 seconds individually.
-  They can use a cluster, and each of them is executed in a separate JVM.
+  _Medium_ test cases are executed in separate JVM and individual test case 
should run in 50 seconds
+   or less. Together, they should take less than 30 minutes, and are quite 
stable in their results.
+   These test cases can use a mini cluster. These are run as part of patch 
pre-commit.
 
 Large Tests (((LargeTests)))::
-  _Large_ tests are everything else.
+  _Large_ test cases are everything else.
   They are typically large-scale tests, regression tests for specific bugs, 
timeout tests, performance tests.
   They are executed
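As a concrete companion to the categorization and @Rule -> @ClassRule guidance in this diff, a hypothetical small test case might look like the sketch below. Plain JUnit 4 Timeout is used here for the class-level limit, and the SmallTests category is assumed to live in org.apache.hadoop.hbase.testclassification; the HBase tree's own category-based timeout rule is not shown in this excerpt.

import static org.junit.Assert.assertTrue;

import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.Timeout;

@Category(SmallTests.class)
public class TestSomethingSmall {
  // Applied as a class rule, this Timeout bounds the whole fixture's run; the
  // 15-second figure is illustrative, echoing the small-test guideline above.
  @ClassRule
  public static final Timeout CLASS_TIMEOUT = Timeout.seconds(15);

  @Test
  public void testFast() {
    assertTrue(1 + 1 == 2); // no mini cluster: small tests must stay lightweight
  }
}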