hbase git commit: HBASE-15799 Two Shell 'close_region' Example Syntaxes Don't Work (Matt Warhaftig)

2016-05-09 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 8604f9eeb -> 3b74b6f32


HBASE-15799 Two Shell 'close_region' Example Syntaxes Don't Work (Matt Warhaftig)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3b74b6f3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3b74b6f3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3b74b6f3

Branch: refs/heads/master
Commit: 3b74b6f329ad6ebc0d2d2548a7e1290297674529
Parents: 8604f9e
Author: tedyu 
Authored: Mon May 9 20:58:55 2016 -0700
Committer: tedyu 
Committed: Mon May 9 20:58:55 2016 -0700

--
 hbase-shell/src/main/ruby/hbase/admin.rb|  6 ++--
 .../main/ruby/shell/commands/close_region.rb|  1 -
 hbase-shell/src/test/ruby/hbase/admin_test.rb   | 29 
 3 files changed, 33 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3b74b6f3/hbase-shell/src/main/ruby/hbase/admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb 
b/hbase-shell/src/main/ruby/hbase/admin.rb
index 7ea315f..88a6598 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -433,8 +433,10 @@ module Hbase
 # If server name is nil, we presume region_name is full region name (HRegionInfo.getRegionName).
 # If server name is not nil, we presume it is the region's encoded name (HRegionInfo.getEncodedName)
 def close_region(region_name, server)
-  if (server == nil || !closeEncodedRegion?(region_name, server))
-   @admin.closeRegion(region_name, server)
+  if (region_name.end_with? ".")
+@admin.closeRegion(region_name, server)
+  else
+closeEncodedRegion?(region_name, server)
   end
 end
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/3b74b6f3/hbase-shell/src/main/ruby/shell/commands/close_region.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/close_region.rb 
b/hbase-shell/src/main/ruby/shell/commands/close_region.rb
index 9e2900c..ef1c99c 100644
--- a/hbase-shell/src/main/ruby/shell/commands/close_region.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/close_region.rb
@@ -46,7 +46,6 @@ Examples:
 
   hbase> close_region 'REGIONNAME'
   hbase> close_region 'REGIONNAME', 'SERVER_NAME'
-  hbase> close_region 'ENCODED_REGIONNAME'
   hbase> close_region 'ENCODED_REGIONNAME', 'SERVER_NAME'
 EOF
   end
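
For readers driving the same operation from the Java client rather than the shell, a minimal sketch follows. It assumes an already-open Connection named conn and the HBase 1.x Admin API (closeRegion and closeRegionWithEncodedRegionName); the class and method names are illustrative, and it simply mirrors the trailing-dot dispatch added to admin.rb above.

import java.io.IOException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;

public class CloseRegionSketch {
  // Full region names (HRegionInfo.getRegionName) carry a trailing '.', while
  // encoded names (HRegionInfo.getEncodedName) do not -- the same check the
  // shell's close_region now performs.
  static void closeRegion(Connection conn, String regionName, String serverName)
      throws IOException {
    try (Admin admin = conn.getAdmin()) {
      if (regionName.endsWith(".")) {
        admin.closeRegion(regionName, serverName);                       // full name
      } else {
        admin.closeRegionWithEncodedRegionName(regionName, serverName);  // encoded name
      }
    }
  }
}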

http://git-wip-us.apache.org/repos/asf/hbase/blob/3b74b6f3/hbase-shell/src/test/ruby/hbase/admin_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/admin_test.rb 
b/hbase-shell/src/test/ruby/hbase/admin_test.rb
index 54f7418..50a65d0 100644
--- a/hbase-shell/src/test/ruby/hbase/admin_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/admin_test.rb
@@ -238,6 +238,35 @@ module Hbase
 end
   end
 
+  # Simple administration methods tests
+  class AdminRegionTest < Test::Unit::TestCase
+include TestHelpers
+def setup
+  setup_hbase
+  # Create test table if it does not exist
+  @test_name = "hbase_shell_tests_table"
+  drop_test_table(@test_name)
+  create_test_table(@test_name)
+end
+
+def teardown
+  shutdown
+end
+
+define_test "close_region should allow encoded & non-encoded region names" do
+  region = admin.locate_region(@test_name, '')
+  serverName = region.getServerName().getServerName()
+  regionName = region.getRegionInfo().getRegionNameAsString()
+  encodedRegionName = region.getRegionInfo().getEncodedName()
+
+  # Close region with just region name.
+  admin.close_region(regionName, nil)
+  # Close region with region name and server.
+  admin.close_region(regionName, serverName)
+  admin.close_region(encodedRegionName, serverName)
+end
+  end
+
  # Simple administration methods tests
   class AdminAlterTableTest < Test::Unit::TestCase
 include TestHelpers



[3/4] hbase git commit: HBASE-15415 Improve Master WebUI snapshot information (huaxiang sun)

2016-05-09 Thread mbertozzi
HBASE-15415 Improve Master WebUI snapshot information (huaxiang sun)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/921f745b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/921f745b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/921f745b

Branch: refs/heads/branch-1.3
Commit: 921f745b25ae6b72bb81a4ace1c7a92c0e376c14
Parents: 1c823ae
Author: Matteo Bertozzi 
Authored: Mon May 9 20:22:07 2016 -0700
Committer: Matteo Bertozzi 
Committed: Mon May 9 20:26:51 2016 -0700

--
 .../hbase/tmpl/master/MasterStatusTmpl.jamon|   2 +-
 .../hadoop/hbase/snapshot/SnapshotInfo.java | 202 ---
 .../hbase/snapshot/SnapshotReferenceUtil.java   |  70 ---
 .../hbase-webapps/master/snapshotsStats.jsp | 157 ++
 4 files changed, 378 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/921f745b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
index 7429498..2a5bfc7 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
@@ -476,7 +476,7 @@ AssignmentManager assignmentManager = 
master.getAssignmentManager();
 <% new Date(snapshotDesc.getCreationTime()) %>
 
 
-<% snapshots.size() %> snapshot(s) in set.
+<% snapshots.size() %> snapshot(s) in set. [Snapshot Storefile stats]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/921f745b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
index d93535b..2e9a8d9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
@@ -23,8 +23,12 @@ import java.io.FileNotFoundException;
 import java.net.URI;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Date;
 import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -122,6 +126,7 @@ public final class SnapshotInfo extends Configured 
implements Tool {
 private AtomicInteger logsCount = new AtomicInteger();
 private AtomicLong hfileArchiveSize = new AtomicLong();
 private AtomicLong hfileSize = new AtomicLong();
+private AtomicLong nonSharedHfilesArchiveSize = new AtomicLong();
 private AtomicLong logSize = new AtomicLong();
 
 private final SnapshotDescription snapshot;
@@ -194,6 +199,15 @@ public final class SnapshotInfo extends Configured 
implements Tool {
   return hfileArchiveSize.get();
 }
 
+/** @return the total size of the store files in the archive which is not shared
+ *with other snapshots and tables.
+ *This is only calculated when getSnapshotStats(Configuration, SnapshotDescription, Map)
+ *is called with a non-null Map
+ */
+public long getNonSharedArchivedStoreFilesSize() {
+  return nonSharedHfilesArchiveSize.get();
+}
+
 /** @return the percentage of the shared store files */
 public float getSharedStoreFilePercentage() {
   return ((float)hfileSize.get() / (hfileSize.get() + 
hfileArchiveSize.get())) * 100;
@@ -204,15 +218,46 @@ public final class SnapshotInfo extends Configured 
implements Tool {
   return logSize.get();
 }
 
+/** Check if, for a given file in the archive, other snapshots or tables still
+ * reference it.
+ * @param filePath file path in the archive
+ * @param snapshotFilesMap a map from each store file referenced by snapshots to the
+ * number of snapshots that refer to it.
+ * @return true if the file is still referenced, false otherwise
+ */
+private boolean isArchivedFileStillReferenced(final Path filePath,
+final Map<Path, Integer> snapshotFilesMap) {
+
+  Integer c = snapshotFilesMap.get(filePath);
+
+  // Check whether other snapshots, or a table created by clone_snapshot() (via
+  // back-reference), still reference it.
+  if ((c != null) && (c 
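
The truncated hunk above consults a per-file count of how many snapshots reference each archived store file. A hedged, self-contained sketch of that bookkeeping idea (names, types and the count threshold here are illustrative, not the patch itself):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class SnapshotFileRefCount {
  // Map from archived store-file path to the number of snapshots referencing it.
  private final Map<String, Integer> refCount = new ConcurrentHashMap<>();

  /** Record one snapshot's reference to an archived store file. */
  public void addReference(String filePath) {
    refCount.merge(filePath, 1, Integer::sum);
  }

  /** In this sketch, "still shared" means more than one recorded reference. */
  public boolean isStillShared(String filePath) {
    Integer c = refCount.get(filePath);
    return c != null && c > 1;
  }
}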

[1/4] hbase git commit: HBASE-15415 Improve Master WebUI snapshot information (huaxiang sun)

2016-05-09 Thread mbertozzi
Repository: hbase
Updated Branches:
  refs/heads/branch-1 690b44d29 -> 98d13c745
  refs/heads/branch-1.3 6ba2ada1b -> 921f745b2
  refs/heads/master 224b03b2a -> 8604f9eeb


HBASE-15415 Improve Master WebUI snapshot information (huaxiang sun)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8604f9ee
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8604f9ee
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8604f9ee

Branch: refs/heads/master
Commit: 8604f9eebb08eef10ea99043ea5758d08d8dbf6b
Parents: 224b03b
Author: Matteo Bertozzi 
Authored: Mon May 9 20:18:13 2016 -0700
Committer: Matteo Bertozzi 
Committed: Mon May 9 20:20:49 2016 -0700

--
 .../hbase/tmpl/master/MasterStatusTmpl.jamon|   2 +-
 .../hadoop/hbase/snapshot/SnapshotInfo.java | 205 +--
 .../hbase/snapshot/SnapshotReferenceUtil.java   |  70 ---
 .../hbase-webapps/master/snapshotsStats.jsp | 149 ++
 4 files changed, 384 insertions(+), 42 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8604f9ee/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
index 38bcf22..e39ad8a 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
@@ -477,7 +477,7 @@ AssignmentManager assignmentManager = 
master.getAssignmentManager();
 <% new Date(snapshotDesc.getCreationTime()) %>
 
 
-<% snapshots.size() %> snapshot(s) in set.
+<% snapshots.size() %> snapshot(s) in set. [Snapshot Storefile stats]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/8604f9ee/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
index c944fc4..3c5033e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
@@ -23,8 +23,12 @@ import java.io.FileNotFoundException;
 import java.net.URI;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Date;
 import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -126,6 +130,7 @@ public final class SnapshotInfo extends Configured 
implements Tool {
 private AtomicLong hfilesArchiveSize = new AtomicLong();
 private AtomicLong hfilesSize = new AtomicLong();
 private AtomicLong hfilesMobSize = new AtomicLong();
+private AtomicLong nonSharedHfilesArchiveSize = new AtomicLong();
 private AtomicLong logSize = new AtomicLong();
 
 private final HBaseProtos.SnapshotDescription snapshot;
@@ -142,6 +147,15 @@ public final class SnapshotInfo extends Configured 
implements Tool {
   this.fs = fs;
 }
 
+SnapshotStats(final Configuration conf, final FileSystem fs,
+final HBaseProtos.SnapshotDescription snapshot) {
+  this.snapshot = snapshot;
+  this.snapshotTable = TableName.valueOf(snapshot.getTable());
+  this.conf = conf;
+  this.fs = fs;
+}
+
+
 /** @return the snapshot descriptor */
 public SnapshotDescription getSnapshotDescription() {
   return new SnapshotDescription(this.snapshot.getName(), 
this.snapshot.getTable(),
@@ -207,6 +221,17 @@ public final class SnapshotInfo extends Configured 
implements Tool {
 /** @return the total size of the store files in the mob store*/
 public long getMobStoreFilesSize() { return hfilesMobSize.get(); }
 
+/** @return the total size of the store files in the archive which is not shared
+ *with other snapshots and tables
+ *
+ *This is only calculated when
+ *  {@link #getSnapshotStats(Configuration, HBaseProtos.SnapshotDescription, Map)}
+ *is called with a non-null Map
+ */
+public long getNonSharedArchivedStoreFilesSize() {
+  return nonSharedHfilesArchiveSize.get();
+}
+
 /** @return the percentage of the shared store files */
 public float 

[4/4] hbase git commit: HBASE-15526 Make SnapshotManager accessible through MasterServices

2016-05-09 Thread mbertozzi
HBASE-15526 Make SnapshotManager accessible through MasterServices


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1c823ae5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1c823ae5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1c823ae5

Branch: refs/heads/branch-1.3
Commit: 1c823ae5c7d267817150396f7de906ce829b805f
Parents: 6ba2ada
Author: tedyu 
Authored: Thu Mar 24 16:02:00 2016 -0700
Committer: Matteo Bertozzi 
Committed: Mon May 9 20:26:51 2016 -0700

--
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java  | 3 +--
 .../java/org/apache/hadoop/hbase/master/MasterServices.java| 6 ++
 .../org/apache/hadoop/hbase/master/TestCatalogJanitor.java | 6 ++
 .../hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java| 4 ++--
 4 files changed, 15 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1c823ae5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 126a391..9323c36 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -2480,10 +2480,9 @@ public class HMaster extends HRegionServer implements 
MasterServices, Server {
   }
 
   /**
-   * Exposed for TESTING!
* @return the underlying snapshot manager
*/
-  public SnapshotManager getSnapshotManagerForTesting() {
+  public SnapshotManager getSnapshotManager() {
 return this.snapshotManager;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/1c823ae5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
index cd6b375..a1bcd7d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotDisabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.executor.ExecutorService;
 import org.apache.hadoop.hbase.quotas.MasterQuotaManager;
@@ -46,6 +47,11 @@ import com.google.protobuf.Service;
 @InterfaceAudience.Private
 public interface MasterServices extends Server {
   /**
+   * @return the underlying snapshot manager
+   */
+  SnapshotManager getSnapshotManager();
+
+  /**
* @return Master's instance of the {@link AssignmentManager}
*/
   AssignmentManager getAssignmentManager();

http://git-wip-us.apache.org/repos/asf/hbase/blob/1c823ae5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
index 6dc0eb9..2d1c0e8 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
@@ -64,6 +64,7 @@ import org.apache.hadoop.hbase.executor.ExecutorService;
 import org.apache.hadoop.hbase.io.Reference;
 import 
org.apache.hadoop.hbase.master.CatalogJanitor.SplitParentFirstComparator;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
@@ -236,6 +237,11 @@ public class TestCatalogJanitor {
 }
 
 @Override
+public SnapshotManager getSnapshotManager() {
+  return null;
+}
+
+@Override
 public AssignmentManager getAssignmentManager() {
   return this.asm;
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/1c823ae5/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestSnapshotFromMaster.java
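
Beyond the rename from getSnapshotManagerForTesting(), exposing getSnapshotManager() on MasterServices lets master-side code that only holds a MasterServices reference reach the snapshot manager without casting to HMaster. A minimal hedged sketch (class and method names are illustrative; the null guard matters because mock implementations such as the one added to TestCatalogJanitor return null):

import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;

public class SnapshotManagerAccess {
  // `services` is assumed to be handed in by the surrounding master-side code.
  static SnapshotManager snapshotManagerOf(MasterServices services) {
    SnapshotManager sm = services.getSnapshotManager();
    if (sm == null) {
      throw new IllegalStateException("SnapshotManager is not available");
    }
    return sm;
  }
}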

[2/4] hbase git commit: HBASE-15415 Improve Master WebUI snapshot information (huaxiang sun)

2016-05-09 Thread mbertozzi
HBASE-15415 Improve Master WebUI snapshot information (huaxiang sun)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/98d13c74
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/98d13c74
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/98d13c74

Branch: refs/heads/branch-1
Commit: 98d13c745cf6b8c897225903237f27f239f97ad4
Parents: 690b44d
Author: Matteo Bertozzi 
Authored: Mon May 9 20:22:07 2016 -0700
Committer: Matteo Bertozzi 
Committed: Mon May 9 20:22:07 2016 -0700

--
 .../hbase/tmpl/master/MasterStatusTmpl.jamon|   2 +-
 .../hadoop/hbase/snapshot/SnapshotInfo.java | 202 ---
 .../hbase/snapshot/SnapshotReferenceUtil.java   |  70 ---
 .../hbase-webapps/master/snapshotsStats.jsp | 157 ++
 4 files changed, 378 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/98d13c74/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
index 7429498..2a5bfc7 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
@@ -476,7 +476,7 @@ AssignmentManager assignmentManager = 
master.getAssignmentManager();
 <% new Date(snapshotDesc.getCreationTime()) %>
 
 
-<% snapshots.size() %> snapshot(s) in set.
+<% snapshots.size() %> snapshot(s) in set. [Snapshot Storefile stats]
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/98d13c74/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
index d93535b..2e9a8d9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
@@ -23,8 +23,12 @@ import java.io.FileNotFoundException;
 import java.net.URI;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.Date;
 import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -122,6 +126,7 @@ public final class SnapshotInfo extends Configured 
implements Tool {
 private AtomicInteger logsCount = new AtomicInteger();
 private AtomicLong hfileArchiveSize = new AtomicLong();
 private AtomicLong hfileSize = new AtomicLong();
+private AtomicLong nonSharedHfilesArchiveSize = new AtomicLong();
 private AtomicLong logSize = new AtomicLong();
 
 private final SnapshotDescription snapshot;
@@ -194,6 +199,15 @@ public final class SnapshotInfo extends Configured 
implements Tool {
   return hfileArchiveSize.get();
 }
 
+/** @return the total size of the store files in the archive which is not shared
+ *with other snapshots and tables.
+ *This is only calculated when getSnapshotStats(Configuration, SnapshotDescription, Map)
+ *is called with a non-null Map
+ */
+public long getNonSharedArchivedStoreFilesSize() {
+  return nonSharedHfilesArchiveSize.get();
+}
+
 /** @return the percentage of the shared store files */
 public float getSharedStoreFilePercentage() {
   return ((float)hfileSize.get() / (hfileSize.get() + 
hfileArchiveSize.get())) * 100;
@@ -204,15 +218,46 @@ public final class SnapshotInfo extends Configured 
implements Tool {
   return logSize.get();
 }
 
+/** Check if, for a given file in the archive, other snapshots or tables still
+ * reference it.
+ * @param filePath file path in the archive
+ * @param snapshotFilesMap a map from each store file referenced by snapshots to the
+ * number of snapshots that refer to it.
+ * @return true if the file is still referenced, false otherwise
+ */
+private boolean isArchivedFileStillReferenced(final Path filePath,
+final Map<Path, Integer> snapshotFilesMap) {
+
+  Integer c = snapshotFilesMap.get(filePath);
+
+  // Check whether other snapshots, or a table created by clone_snapshot() (via
+  // back-reference), still reference it.
+  if ((c != null) && (c 

hbase git commit: HBASE-15801 Upgrade checkstyle for all branches

2016-05-09 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/0.98 5285b15bf -> 2c9fe0753


HBASE-15801 Upgrade checkstyle for all branches


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2c9fe075
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2c9fe075
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2c9fe075

Branch: refs/heads/0.98
Commit: 2c9fe075368ea219b83348e24b7578ad77c52fd4
Parents: 5285b15
Author: zhangduo 
Authored: Mon May 9 14:42:27 2016 +0800
Committer: zhangduo 
Committed: Mon May 9 15:42:35 2016 +0800

--
 pom.xml | 15 +--
 1 file changed, 13 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2c9fe075/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 965d979..38a73c7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -768,13 +768,18 @@
 
   org.apache.maven.plugins
   maven-checkstyle-plugin
-  2.13
+  2.17
   
 
   org.apache.hbase
   hbase-checkstyle
   ${project.version}
 
+
+  com.puppycrawl.tools
+  checkstyle
+  ${checkstyle.version}
+
   
   
 hbase/checkstyle.xml
@@ -889,6 +894,11 @@
 hbase-checkstyle
 ${project.version}
   
+  
+com.puppycrawl.tools
+checkstyle
+${checkstyle.version}
+  
 
 
   hbase/checkstyle.xml
@@ -1110,6 +1120,7 @@
 2.5.2
 1.3.9-1
 3.3
+6.18
 2.9
 2.6
 1.5.2.1
@@ -2270,7 +2281,7 @@
   
 org.apache.maven.plugins
 maven-checkstyle-plugin
-2.13
+2.17
 
   hbase/checkstyle.xml
   
hbase/checkstyle-suppressions.xml



hbase git commit: HBASE-15801 Upgrade checkstyle for all branches

2016-05-09 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1.0 a3e77bf36 -> edc0a171f


HBASE-15801 Upgrade checkstyle for all branches


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/edc0a171
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/edc0a171
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/edc0a171

Branch: refs/heads/branch-1.0
Commit: edc0a171f1e449ffe2b2749f53dcbe76d067acbd
Parents: a3e77bf
Author: zhangduo 
Authored: Mon May 9 14:42:27 2016 +0800
Committer: zhangduo 
Committed: Mon May 9 15:35:14 2016 +0800

--
 pom.xml | 15 +--
 1 file changed, 13 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/edc0a171/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 1ab1f58..1ced55c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -781,13 +781,18 @@
 
   org.apache.maven.plugins
   maven-checkstyle-plugin
-  2.13
+  2.17
   
 
   org.apache.hbase
   hbase-checkstyle
   ${project.version}
 
+
+  com.puppycrawl.tools
+  checkstyle
+  ${checkstyle.version}
+
   
   
 hbase/checkstyle.xml
@@ -902,6 +907,11 @@
 hbase-checkstyle
 ${project.version}
   
+  
+com.puppycrawl.tools
+checkstyle
+${checkstyle.version}
+  
 
 
   hbase/checkstyle.xml
@@ -1174,6 +1184,7 @@
 1.6
 2.3.4
 1.3.9-1
+6.18
 2.9
 1.5.2.1
 
@@ -2455,7 +2466,7 @@
   
 org.apache.maven.plugins
 maven-checkstyle-plugin
-2.13
+2.17
 
   hbase/checkstyle.xml
   
hbase/checkstyle-suppressions.xml



hbase git commit: HBASE-15801 Upgrade checkstyle for all branches

2016-05-09 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1.1 1b4e4ea62 -> 51baea64d


HBASE-15801 Upgrade checkstyle for all branches


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/51baea64
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/51baea64
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/51baea64

Branch: refs/heads/branch-1.1
Commit: 51baea64d6528bdca61b7b924210dc8fffaa7c75
Parents: 1b4e4ea
Author: zhangduo 
Authored: Mon May 9 14:42:27 2016 +0800
Committer: zhangduo 
Committed: Tue May 10 10:15:37 2016 +0800

--
 pom.xml | 15 +--
 1 file changed, 13 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/51baea64/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 7a1e271..f9a485d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -783,13 +783,18 @@
 
   org.apache.maven.plugins
   maven-checkstyle-plugin
-  2.13
+  2.17
   
 
   org.apache.hbase
   hbase-checkstyle
   ${project.version}
 
+
+  com.puppycrawl.tools
+  checkstyle
+  ${checkstyle.version}
+
   
   
 hbase/checkstyle.xml
@@ -905,6 +910,11 @@
 hbase-checkstyle
 ${project.version}
   
+  
+com.puppycrawl.tools
+checkstyle
+${checkstyle.version}
+  
 
 
   hbase/checkstyle.xml
@@ -1181,6 +1191,7 @@
 1.6
 2.3.4
 1.3.9-1
+6.18
 2.9
 1.5.2.1
 
@@ -2506,7 +2517,7 @@
   
 org.apache.maven.plugins
 maven-checkstyle-plugin
-2.13
+2.17
 
   hbase/checkstyle.xml
   
hbase/checkstyle-suppressions.xml



hbase git commit: HBASE-15801 Upgrade checkstyle for all branches

2016-05-09 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 4561e6c6c -> 258d96021


HBASE-15801 Upgrade checkstyle for all branches


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/258d9602
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/258d9602
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/258d9602

Branch: refs/heads/branch-1.2
Commit: 258d9602133188b7010c3da6ee91b2e1c281b61c
Parents: 4561e6c
Author: zhangduo 
Authored: Mon May 9 14:42:27 2016 +0800
Committer: zhangduo 
Committed: Tue May 10 10:06:22 2016 +0800

--
 pom.xml | 15 +--
 1 file changed, 13 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/258d9602/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 77786c2..45c2ad9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -791,13 +791,18 @@
 
   org.apache.maven.plugins
   maven-checkstyle-plugin
-  2.13
+  2.17
   
 
   org.apache.hbase
   hbase-checkstyle
   ${project.version}
 
+
+  com.puppycrawl.tools
+  checkstyle
+  ${checkstyle.version}
+
   
   
 hbase/checkstyle.xml
@@ -913,6 +918,11 @@
 hbase-checkstyle
 ${project.version}
   
+  
+com.puppycrawl.tools
+checkstyle
+${checkstyle.version}
+  
 
 
   hbase/checkstyle.xml
@@ -1188,6 +1198,7 @@
 1.6
 2.4.1
 1.3.9-1
+6.18
 2.10.3
 1.5.2.1
 
@@ -2545,7 +2556,7 @@
   
 org.apache.maven.plugins
 maven-checkstyle-plugin
-2.13
+2.17
 
   hbase/checkstyle.xml
   
hbase/checkstyle-suppressions.xml



hbase git commit: HBASE-15801 Upgrade checkstyle for all branches

2016-05-09 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 c732a43df -> 6ba2ada1b


HBASE-15801 Upgrade checkstyle for all branches


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6ba2ada1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6ba2ada1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6ba2ada1

Branch: refs/heads/branch-1.3
Commit: 6ba2ada1bafd8a7bfffa6b09179f0b67622578fe
Parents: c732a43
Author: zhangduo 
Authored: Mon May 9 14:42:27 2016 +0800
Committer: zhangduo 
Committed: Tue May 10 09:56:04 2016 +0800

--
 pom.xml | 15 +--
 1 file changed, 13 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6ba2ada1/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 0d18d25..6f11d0b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -769,13 +769,18 @@
 
   org.apache.maven.plugins
   maven-checkstyle-plugin
-  2.13
+  2.17
   
 
   org.apache.hbase
   hbase-checkstyle
   ${project.version}
 
+
+  com.puppycrawl.tools
+  checkstyle
+  ${checkstyle.version}
+
   
   
 hbase/checkstyle.xml
@@ -891,6 +896,11 @@
 hbase-checkstyle
 ${project.version}
   
+  
+com.puppycrawl.tools
+checkstyle
+${checkstyle.version}
+  
 
 
   hbase/checkstyle.xml
@@ -1166,6 +1176,7 @@
 1.6
 2.4.1
 1.3.9-1
+6.18
 2.10.3
 1.5.2.1
 
@@ -2525,7 +2536,7 @@
   
 org.apache.maven.plugins
 maven-checkstyle-plugin
-2.13
+2.17
 
   hbase/checkstyle.xml
   
hbase/checkstyle-suppressions.xml



hbase git commit: HBASE-15801 Upgrade checkstyle for all branches

2016-05-09 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1 2f282aca1 -> 690b44d29


HBASE-15801 Upgrade checkstyle for all branches


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/690b44d2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/690b44d2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/690b44d2

Branch: refs/heads/branch-1
Commit: 690b44d29a1014ecddd0fee4d56ee9f76bafb03e
Parents: 2f282ac
Author: zhangduo 
Authored: Mon May 9 14:42:27 2016 +0800
Committer: zhangduo 
Committed: Tue May 10 09:37:20 2016 +0800

--
 pom.xml | 15 +--
 1 file changed, 13 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/690b44d2/pom.xml
--
diff --git a/pom.xml b/pom.xml
index b833d66..e6bf6d9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -769,13 +769,18 @@
 
   org.apache.maven.plugins
   maven-checkstyle-plugin
-  2.13
+  2.17
   
 
   org.apache.hbase
   hbase-checkstyle
   ${project.version}
 
+
+  com.puppycrawl.tools
+  checkstyle
+  ${checkstyle.version}
+
   
   
 hbase/checkstyle.xml
@@ -891,6 +896,11 @@
 hbase-checkstyle
 ${project.version}
   
+  
+com.puppycrawl.tools
+checkstyle
+${checkstyle.version}
+  
 
 
   hbase/checkstyle.xml
@@ -1166,6 +1176,7 @@
 1.6
 2.4.1
 1.3.9-1
+6.18
 2.10.3
 1.5.2.1
 
@@ -2525,7 +2536,7 @@
   
 org.apache.maven.plugins
 maven-checkstyle-plugin
-2.13
+2.17
 
   hbase/checkstyle.xml
   
hbase/checkstyle-suppressions.xml



hbase git commit: HBASE-15801 Upgrade checkstyle for all branches

2016-05-09 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 9d17e3d22 -> 224b03b2a


HBASE-15801 Upgrade checkstyle for all branches


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/224b03b2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/224b03b2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/224b03b2

Branch: refs/heads/master
Commit: 224b03b2a5258c819988cd02363bba7b4e0121f2
Parents: 9d17e3d
Author: zhangduo 
Authored: Mon May 9 14:38:51 2016 +0800
Committer: zhangduo 
Committed: Tue May 10 09:33:46 2016 +0800

--
 pom.xml | 11 +++
 1 file changed, 11 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/224b03b2/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 0b3c61a..6652b78 100644
--- a/pom.xml
+++ b/pom.xml
@@ -883,6 +883,11 @@
   hbase-checkstyle
   ${project.version}
 
+
+  com.puppycrawl.tools
+  checkstyle
+  ${checkstyle.version}
+
   
   
 hbase/checkstyle.xml
@@ -998,6 +1003,11 @@
 hbase-checkstyle
 ${project.version}
   
+  
+com.puppycrawl.tools
+checkstyle
+${checkstyle.version}
+  
 
 
   hbase/checkstyle.xml
@@ -1248,6 +1258,7 @@
 1.8
 2.4.1
 1.3.9-1
+6.18
 2.10.3
 1.5.2.1
 



hbase git commit: HBASE-15797 TestIPCUtil fails after HBASE-15795

2016-05-09 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 11740570c -> 9d17e3d22


HBASE-15797 TestIPCUtil fails after HBASE-15795

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9d17e3d2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9d17e3d2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9d17e3d2

Branch: refs/heads/master
Commit: 9d17e3d22a03dbff99b67837184bfe731049d56f
Parents: 1174057
Author: Jurriaan Mous 
Authored: Sun May 8 08:43:28 2016 +0200
Committer: stack 
Committed: Mon May 9 17:04:58 2016 -0700

--
 .../main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java| 10 ++
 1 file changed, 6 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9d17e3d2/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
index a87bc8d..74466b5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java
@@ -139,16 +139,16 @@ public class IPCUtil {
   baos = new ByteBufferOutputStream(bufferSize);
 }
 Compressor poolCompressor = null;
-try (OutputStream os = baos) {
-  OutputStream os2Compress = os;
+OutputStream os = baos;
+try  {
   if (compressor != null) {
 if (compressor instanceof Configurable) {
   ((Configurable) compressor).setConf(this.conf);
 }
 poolCompressor = CodecPool.getCompressor(compressor);
-os2Compress = compressor.createOutputStream(os, poolCompressor);
+os = compressor.createOutputStream(os, poolCompressor);
   }
-  Codec.Encoder encoder = codec.getEncoder(os2Compress);
+  Codec.Encoder encoder = codec.getEncoder(os);
   int count = 0;
   while (cellScanner.advance()) {
 encoder.write(cellScanner.current());
@@ -163,6 +163,8 @@ public class IPCUtil {
 } catch (BufferOverflowException e) {
   throw new DoNotRetryIOException(e);
 } finally {
+  os.close();
+
   if (poolCompressor != null) {
 CodecPool.returnCompressor(poolCompressor);
   }
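
The shape of the fix, restated as a hedged stand-alone sketch: keep a handle on the outermost stream (the compressor wrapper when compression is enabled, otherwise the buffer itself) and close it exactly once in finally, so the compressed trailer is flushed before the underlying buffer is consumed. GZIPOutputStream stands in for the Hadoop codec stream; the class and method names are illustrative.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.zip.GZIPOutputStream;

public class CompressThenRead {
  static byte[] encode(byte[] payload, boolean compress) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    OutputStream os = baos;
    try {
      if (compress) {
        os = new GZIPOutputStream(os);   // wrapper now owns the base stream
      }
      os.write(payload);
    } finally {
      os.close();   // closing the outermost stream flushes the trailer into baos
    }
    return baos.toByteArray();
  }
}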



hbase git commit: HBASE-15236 Inconsistent cell reads over multiple bulk-loaded HFiles. In KeyValueHeap, if two cells are same i.e. have same key and timestamp, then instead of directly using seq id t

2016-05-09 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master a6e29676d -> 11740570c


HBASE-15236 Inconsistent cell reads over multiple bulk-loaded HFiles. In 
KeyValueHeap, if two cells are the same, i.e. have the same key and timestamp, then 
instead of directly using seq id to determine the newer one, we should use 
StoreFile.Comparater.SEQ_ID because that's what is used to determine the order of 
hfiles. In this patch, we assign each scanner an order based on its index in the 
storefiles list, which is then used in KeyValueHeap to disambiguate between 
identical cells. Changes the getSequenceId() in the KeyValueScanner class to 
getScannerOrder(). Testing: Adds a unit test to TestKeyValueHeap. Manual testing: 
Three cases (Tables t, t2, t3 in the jira description), single region, 2 hfiles 
with the same seq id, timestamps and duplicate KVs. Made sure that the returned kv was 
the same for get and scan. (Apekshit)

Change-Id: I22600c91c0a51fb63eb17db73472839d2f13957c

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/11740570
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/11740570
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/11740570

Branch: refs/heads/master
Commit: 11740570c1440254a76fae67d318c6a852cb56b8
Parents: a6e2967
Author: Apekshit 
Authored: Tue Feb 23 00:31:18 2016 -0800
Committer: stack 
Committed: Mon May 9 16:57:06 2016 -0700

--
 .../hadoop/hbase/regionserver/KeyValueHeap.java |  20 +-
 .../hbase/regionserver/KeyValueScanner.java |  12 +-
 .../hbase/regionserver/MemStoreScanner.java |   7 +-
 .../hbase/regionserver/SegmentScanner.java  |  35 +--
 .../hadoop/hbase/regionserver/StoreFile.java|   6 +-
 .../hbase/regionserver/StoreFileReader.java |  43 +--
 .../hbase/regionserver/StoreFileScanner.java|  38 ++-
 .../hadoop/hbase/regionserver/StoreScanner.java |   5 +-
 .../hbase/util/CollectionBackedScanner.java |   5 +-
 .../hbase/regionserver/TestKeyValueHeap.java| 269 +++
 .../hbase/regionserver/TestStoreFile.java   |   2 +-
 .../regionserver/compactions/TestCompactor.java |   2 +-
 .../compactions/TestStripeCompactionPolicy.java |   3 +-
 13 files changed, 212 insertions(+), 235 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/11740570/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
index 89fc8fb..9ece14b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueHeap.java
@@ -189,17 +189,10 @@ public class KeyValueHeap extends 
NonReversedNonLazyKeyValueScanner
   if (comparison != 0) {
 return comparison;
   } else {
-// Since both the keys are exactly the same, we break the tie in favor
-// of the key which came latest.
-long leftSequenceID = left.getSequenceID();
-long rightSequenceID = right.getSequenceID();
-if (leftSequenceID > rightSequenceID) {
-  return -1;
-} else if (leftSequenceID < rightSequenceID) {
-  return 1;
-} else {
-  return 0;
-}
+// Since both the keys are exactly the same, we break the tie in favor 
of higher ordered
+// scanner since it'll have newer data. Since higher value should come 
first, we reverse
+// sort here.
+return Long.compare(right.getScannerOrder(), left.getScannerOrder());
   }
 }
 /**
@@ -406,8 +399,11 @@ public class KeyValueHeap extends 
NonReversedNonLazyKeyValueScanner
 return this.heap;
   }
 
+  /**
+   * @see KeyValueScanner#getScannerOrder()
+   */
   @Override
-  public long getSequenceID() {
+  public long getScannerOrder() {
 return 0;
   }
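
The new tie-break, restated as a hedged, self-contained sketch (the OrderedScanner interface and names are illustrative, not HBase's actual types): when two scanners surface equal keys, the scanner with the higher order holds the newer data and must sort first, hence the reversed comparison.

import java.util.Comparator;

public class ScannerOrderTieBreak {
  interface OrderedScanner {
    byte[] peekKey();
    long getScannerOrder();
  }

  static Comparator<OrderedScanner> heapComparator(Comparator<byte[]> keyComparator) {
    return (left, right) -> {
      int cmp = keyComparator.compare(left.peekKey(), right.peekKey());
      if (cmp != 0) {
        return cmp;
      }
      // Equal keys: the higher-ordered scanner has newer data and should come
      // first, so compare in reverse.
      return Long.compare(right.getScannerOrder(), left.getScannerOrder());
    };
  }
}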
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/11740570/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
index ed86a83..44b081b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/KeyValueScanner.java
@@ -70,13 +70,13 @@ public interface KeyValueScanner extends Shipper, Closeable 
{
   boolean reseek(Cell key) 

hbase git commit: HBASE-11625 - Verifies data before building HFileBlock. - Adds HFileBlock.Header class which contains information about location of fields. Testing: Adds CorruptedFSReaderImpl to Tes

2016-05-09 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 4c214b50c -> c732a43df


HBASE-11625 - Verifies data before building HFileBlock. - Adds 
HFileBlock.Header class which contains information about location of fields. 
Testing: Adds CorruptedFSReaderImpl to TestChecksum.

Change-Id: I6777f2ddf8922691c84ca86d0cffa9a37dc879ae

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c732a43d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c732a43d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c732a43d

Branch: refs/heads/branch-1.3
Commit: c732a43df19db60b34dfb083109d9f9c31083223
Parents: 4c214b5
Author: Apekshit 
Authored: Thu May 5 17:05:17 2016 -0700
Committer: stack 
Committed: Mon May 9 16:30:14 2016 -0700

--
 .../hadoop/hbase/io/hfile/ChecksumUtil.java | 53 ++---
 .../hadoop/hbase/io/hfile/HFileBlock.java   | 82 +---
 .../hadoop/hbase/io/hfile/TestChecksum.java | 62 +++
 3 files changed, 125 insertions(+), 72 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c732a43d/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
index b0b1714..a47cc12 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
@@ -78,46 +78,35 @@ public class ChecksumUtil {
   }
 
   /**
-   * Validates that the data in the specified HFileBlock matches the
-   * checksum.  Generates the checksum for the data and
-   * then validate that it matches the value stored in the header.
-   * If there is a checksum mismatch, then return false. Otherwise
-   * return true.
-   * The header is extracted from the specified HFileBlock while the
-   * data-to-be-verified is extracted from 'data'.
+   * Validates that the data in the specified HFileBlock matches the checksum. 
Generates the
+   * checksums for the data and then validate that it matches those stored in 
the end of the data.
+   * @param buffer Contains the data in following order: HFileBlock header, 
data, checksums.
+   * @param pathName Path of the HFile to which the {@code data} belongs. Only 
used for logging.
+   * @param offset offset of the data being validated. Only used for logging.
+   * @param hdrSize Size of the block header in {@code data}. Only used for 
logging.
+   * @return True if checksum matches, else false.
*/
-  static boolean validateBlockChecksum(String pathName, long offset, 
HFileBlock block,
-byte[] data, int hdrSize) throws IOException {
-
-// If this is an older version of the block that does not have
-// checksums, then return false indicating that checksum verification
-// did not succeed. Actually, this method should never be called
-// when the minorVersion is 0, thus this is a defensive check for a
-// cannot-happen case. Since this is a cannot-happen case, it is
-// better to return false to indicate a checksum validation failure.
-if (!block.getHFileContext().isUseHBaseChecksum()) {
-  return false;
-}
-
-// Get a checksum object based on the type of checksum that is
-// set in the HFileBlock header. A ChecksumType.NULL indicates that
-// the caller is not interested in validating checksums, so we
-// always return true.
-ChecksumType cktype = ChecksumType.codeToType(block.getChecksumType());
+  static boolean validateChecksum(ByteBuffer buffer, String pathName, long 
offset, int hdrSize)
+  throws IOException {
+// A ChecksumType.NULL indicates that the caller is not interested in 
validating checksums,
+// so we always return true.
+ChecksumType cktype =
+
ChecksumType.codeToType(buffer.get(HFileBlock.Header.CHECKSUM_TYPE_INDEX));
 if (cktype == ChecksumType.NULL) {
   return true; // No checksum validations needed for this block.
 }
 
 // read in the stored value of the checksum size from the header.
-int bytesPerChecksum = block.getBytesPerChecksum();
+int bytesPerChecksum = 
buffer.getInt(HFileBlock.Header.BYTES_PER_CHECKSUM_INDEX);
 
 DataChecksum dataChecksum = DataChecksum.newDataChecksum(
 cktype.getDataChecksumType(), bytesPerChecksum);
 assert dataChecksum != null;
-int sizeWithHeader =  block.getOnDiskDataSizeWithHeader();
+int onDiskDataSizeWithHeader =
+buffer.getInt(HFileBlock.Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);
 if 
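
The rewritten validateChecksum reads the checksum type, bytes-per-checksum and on-disk data size straight from the block buffer at offsets kept in the new HFileBlock.Header class. A hedged illustration of that fixed-offset idea (the offset values and class name below are assumptions for the sketch, not HBase's actual header layout):

import java.nio.ByteBuffer;

public class BlockHeaderSketch {
  // Illustrative offsets only -- the real constants live in HFileBlock.Header.
  static final int CHECKSUM_TYPE_INDEX = 24;
  static final int BYTES_PER_CHECKSUM_INDEX = 25;
  static final int ON_DISK_DATA_SIZE_WITH_HEADER_INDEX = 29;

  static byte checksumType(ByteBuffer block) {
    return block.get(CHECKSUM_TYPE_INDEX);
  }

  static int bytesPerChecksum(ByteBuffer block) {
    return block.getInt(BYTES_PER_CHECKSUM_INDEX);
  }

  static int onDiskDataSizeWithHeader(ByteBuffer block) {
    return block.getInt(ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);
  }
}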

hbase git commit: HBASE-11625 - Verifies data before building HFileBlock. - Adds HFileBlock.Header class which contains information about location of fields. Testing: Adds CorruptedFSReaderImpl to Tes

2016-05-09 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1 d07d31611 -> 2f282aca1


HBASE-11625 - Verifies data before building HFileBlock. - Adds 
HFileBlock.Header class which contains information about location of fields. 
Testing: Adds CorruptedFSReaderImpl to TestChecksum.

Change-Id: I6777f2ddf8922691c84ca86d0cffa9a37dc879ae

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2f282aca
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2f282aca
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2f282aca

Branch: refs/heads/branch-1
Commit: 2f282aca15b7f36573744ff72bde2d8d07f2f384
Parents: d07d316
Author: Apekshit 
Authored: Thu May 5 17:05:17 2016 -0700
Committer: stack 
Committed: Mon May 9 16:29:47 2016 -0700

--
 .../hadoop/hbase/io/hfile/ChecksumUtil.java | 53 ++---
 .../hadoop/hbase/io/hfile/HFileBlock.java   | 82 +---
 .../hadoop/hbase/io/hfile/TestChecksum.java | 62 +++
 3 files changed, 125 insertions(+), 72 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2f282aca/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
index b0b1714..a47cc12 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/ChecksumUtil.java
@@ -78,46 +78,35 @@ public class ChecksumUtil {
   }
 
   /**
-   * Validates that the data in the specified HFileBlock matches the
-   * checksum.  Generates the checksum for the data and
-   * then validate that it matches the value stored in the header.
-   * If there is a checksum mismatch, then return false. Otherwise
-   * return true.
-   * The header is extracted from the specified HFileBlock while the
-   * data-to-be-verified is extracted from 'data'.
+   * Validates that the data in the specified HFileBlock matches the checksum. 
Generates the
+   * checksums for the data and then validate that it matches those stored in 
the end of the data.
+   * @param buffer Contains the data in following order: HFileBlock header, 
data, checksums.
+   * @param pathName Path of the HFile to which the {@code data} belongs. Only 
used for logging.
+   * @param offset offset of the data being validated. Only used for logging.
+   * @param hdrSize Size of the block header in {@code data}. Only used for 
logging.
+   * @return True if checksum matches, else false.
*/
-  static boolean validateBlockChecksum(String pathName, long offset, 
HFileBlock block,
-byte[] data, int hdrSize) throws IOException {
-
-// If this is an older version of the block that does not have
-// checksums, then return false indicating that checksum verification
-// did not succeed. Actually, this method should never be called
-// when the minorVersion is 0, thus this is a defensive check for a
-// cannot-happen case. Since this is a cannot-happen case, it is
-// better to return false to indicate a checksum validation failure.
-if (!block.getHFileContext().isUseHBaseChecksum()) {
-  return false;
-}
-
-// Get a checksum object based on the type of checksum that is
-// set in the HFileBlock header. A ChecksumType.NULL indicates that
-// the caller is not interested in validating checksums, so we
-// always return true.
-ChecksumType cktype = ChecksumType.codeToType(block.getChecksumType());
+  static boolean validateChecksum(ByteBuffer buffer, String pathName, long 
offset, int hdrSize)
+  throws IOException {
+// A ChecksumType.NULL indicates that the caller is not interested in 
validating checksums,
+// so we always return true.
+ChecksumType cktype =
+
ChecksumType.codeToType(buffer.get(HFileBlock.Header.CHECKSUM_TYPE_INDEX));
 if (cktype == ChecksumType.NULL) {
   return true; // No checksum validations needed for this block.
 }
 
 // read in the stored value of the checksum size from the header.
-int bytesPerChecksum = block.getBytesPerChecksum();
+int bytesPerChecksum = 
buffer.getInt(HFileBlock.Header.BYTES_PER_CHECKSUM_INDEX);
 
 DataChecksum dataChecksum = DataChecksum.newDataChecksum(
 cktype.getDataChecksumType(), bytesPerChecksum);
 assert dataChecksum != null;
-int sizeWithHeader =  block.getOnDiskDataSizeWithHeader();
+int onDiskDataSizeWithHeader =
+buffer.getInt(HFileBlock.Header.ON_DISK_DATA_SIZE_WITH_HEADER_INDEX);
 if 

hbase git commit: HBASE-15612: Minor improvements to CellCounter and RowCounter documentation

2016-05-09 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 2eced6f03 -> a6e29676d


HBASE-15612: Minor improvements to CellCounter and RowCounter documentation

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a6e29676
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a6e29676
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a6e29676

Branch: refs/heads/master
Commit: a6e29676db1d2aa8ff40624573bfd93bf356d2b4
Parents: 2eced6f
Author: Esteban Gutierrez 
Authored: Thu Apr 7 10:04:07 2016 -0700
Committer: stack 
Committed: Mon May 9 14:12:43 2016 -0700

--
 .../org/apache/hadoop/hbase/mapreduce/CellCounter.java|  5 +++--
 src/main/asciidoc/_chapters/ops_mgt.adoc  | 10 --
 2 files changed, 11 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a6e29676/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
index 73f9b93..e2af762 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/CellCounter.java
@@ -64,10 +64,11 @@ import com.google.common.base.Preconditions;
  * 6. Total number of versions of each qualifier.
  * 
  *
- * The cellcounter takes two optional parameters one to use a user
+ * The cellcounter can take optional parameters: a user-supplied
+ * row/family/qualifier string to use in the report, a regex-based or
+ * prefix-based row filter to restrict the count operation to a limited
+ * subset of rows from the table, and a start time and/or end time to
+ * limit the count to a time range.
+ * start time and/or end time to limit the count to a time range.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6e29676/src/main/asciidoc/_chapters/ops_mgt.adoc
--
diff --git a/src/main/asciidoc/_chapters/ops_mgt.adoc 
b/src/main/asciidoc/_chapters/ops_mgt.adoc
index 4c9c7c5..583a872 100644
--- a/src/main/asciidoc/_chapters/ops_mgt.adoc
+++ b/src/main/asciidoc/_chapters/ops_mgt.adoc
@@ -304,12 +304,15 @@ The following utilities are available:
 `RowCounter`::
   Count rows in an HBase table.
 
+`CellCounter`::
+  Count cells in an HBase table.
+
 `replication.VerifyReplication`::
   Compare the data from tables in two different clusters.
   WARNING: It doesn't work for incrementColumnValues'd cells since the 
timestamp is changed.
   Note that this command is in a different package than the others.
 
-Each command except `RowCounter` accepts a single `--help` argument to print usage instructions.
+Each command except `RowCounter` and `CellCounter` accepts a single `--help` argument to print usage instructions.
 
 [[hbck]]
 === HBase `hbck`
@@ -619,7 +622,8 @@ To NOT run WALPlayer as a mapreduce job on your cluster, 
force it to run all in
 
 
link:http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/mapreduce/RowCounter.html[RowCounter]
is a mapreduce job to count all the rows of a table.
 This is a good utility to use as a sanity check to ensure that HBase can read 
all the blocks of a table if there are any concerns of metadata inconsistency.
-It will run the mapreduce all in a single process but it will run faster if you have a MapReduce cluster in place for it to exploit.
+It will run the mapreduce all in a single process but it will run faster if you have a MapReduce cluster in place for it to exploit. It is also possible to limit
+the time range of data to be scanned by using the `--starttime=[starttime]` and `--endtime=[endtime]` flags.
 
 
 $ bin/hbase org.apache.hadoop.hbase.mapreduce.RowCounter  
[ ...]
@@ -642,6 +646,8 @@ The statistics gathered by RowCounter are more fine-grained 
and include:
 
 The program allows you to limit the scope of the run.
 Provide a row regex or prefix to limit the rows to analyze.
+Specify a time range to scan the table by using the `--starttime=[starttime]` and `--endtime=[endtime]` flags.
+
 Use `hbase.mapreduce.scan.column.family` to specify scanning a single column family.
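
On the Java client side, the same restriction boils down to putting a time range on the Scan used for counting; a minimal hedged sketch (class and method names are illustrative):

import java.io.IOException;
import org.apache.hadoop.hbase.client.Scan;

public class TimeRangeScanSketch {
  // setTimeRange takes an inclusive start and exclusive end timestamp.
  static Scan scanBetween(long startTime, long endTime) throws IOException {
    Scan scan = new Scan();
    scan.setTimeRange(startTime, endTime);
    return scan;
  }
}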
 
 



hbase git commit: HBASE-15770 Stop using wangle's global executor

2016-05-09 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 7b1786e8f -> 15804a91b


HBASE-15770 Stop using wangle's global executor

Summary: Connection pool and connection factory now get thread pools through 
their constructor. This means that the client has the whole control over the 
threads.

Test Plan: simple-client still writes.

Differential Revision: https://reviews.facebook.net/D57801


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/15804a91
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/15804a91
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/15804a91

Branch: refs/heads/HBASE-14850
Commit: 15804a91b6b4f456fe5f45673b5be04a2e71814d
Parents: 7b1786e
Author: Elliott Clark 
Authored: Fri May 6 14:32:16 2016 -0700
Committer: Elliott Clark 
Committed: Fri May 6 14:34:42 2016 -0700

--
 .../connection/client-dispatcher.h  |  3 ++-
 .../connection/client-handler.cc|  2 +-
 .../connection/connection-factory.cc|  8 +++---
 .../connection/connection-factory.h |  2 +-
 .../connection/connection-pool-test.cc  |  1 +
 .../connection/connection-pool.cc   |  7 ++---
 .../connection/connection-pool.h|  2 +-
 hbase-native-client/core/client.cc  | 23 +++-
 hbase-native-client/core/client.h   |  9 +--
 hbase-native-client/core/location-cache-test.cc |  5 ++--
 hbase-native-client/core/location-cache.cc  | 28 +++-
 hbase-native-client/core/location-cache.h   |  8 --
 hbase-native-client/core/simple-client.cc   |  8 +++---
 hbase-native-client/serde/server-name-test.cc   |  1 -
 14 files changed, 63 insertions(+), 44 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/15804a91/hbase-native-client/connection/client-dispatcher.h
--
diff --git a/hbase-native-client/connection/client-dispatcher.h 
b/hbase-native-client/connection/client-dispatcher.h
index 4bfb35d..2497cc7 100644
--- a/hbase-native-client/connection/client-dispatcher.h
+++ b/hbase-native-client/connection/client-dispatcher.h
@@ -31,7 +31,8 @@
 
 namespace hbase {
 /**
- * Dispatcher that assigns a call_id and then routes the response back to the 
future.
+ * Dispatcher that assigns a call_id and then routes the response back to the
+ * future.
  */
 class ClientDispatcher
 : public wangle::ClientDispatcherBase>(5000)) {}

http://git-wip-us.apache.org/repos/asf/hbase/blob/15804a91/hbase-native-client/connection/connection-factory.cc
--
diff --git a/hbase-native-client/connection/connection-factory.cc 
b/hbase-native-client/connection/connection-factory.cc
index 635d12d..beec6d5 100644
--- a/hbase-native-client/connection/connection-factory.cc
+++ b/hbase-native-client/connection/connection-factory.cc
@@ -19,8 +19,6 @@
 
 #include "connection/connection-factory.h"
 
-#include 
-
 #include "connection/client-dispatcher.h"
 #include "connection/pipeline.h"
 #include "connection/service.h"
@@ -28,9 +26,9 @@
 using namespace folly;
 using namespace hbase;
 
-ConnectionFactory::ConnectionFactory()
-: io_pool_(std::static_pointer_cast(
-  wangle::getIOExecutor())),
+ConnectionFactory::ConnectionFactory(
+std::shared_ptr io_pool)
+: io_pool_(io_pool),
   pipeline_factory_(std::make_shared()) {}
 
 std::shared_ptr

http://git-wip-us.apache.org/repos/asf/hbase/blob/15804a91/hbase-native-client/connection/connection-factory.h
--
diff --git a/hbase-native-client/connection/connection-factory.h 
b/hbase-native-client/connection/connection-factory.h
index 8b6d8d8..fb5d9fe 100644
--- a/hbase-native-client/connection/connection-factory.h
+++ 

hbase git commit: HBASE-15807 - Update report-flakies.py to look for "FAILED" status in test report. - Remove duplicate testcase names in the result

2016-05-09 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master b75b22680 -> 2eced6f03


HBASE-15807 - Update report-flakies.py to look for "FAILED" status in test 
report. - Remove duplicate testcase names in the result

Tested:
Ran it manually.

(Apekshit)

Change-Id: I2a7751eefe729b2a69c0f78596f72b6a0eb39b66

Signed-off-by: stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2eced6f0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2eced6f0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2eced6f0

Branch: refs/heads/master
Commit: 2eced6f0393abbc6ebbe6d37fffe0184e7255457
Parents: b75b226
Author: Apekshit 
Authored: Mon May 9 11:02:06 2016 -0700
Committer: stack 
Committed: Mon May 9 11:51:16 2016 -0700

--
 dev-support/report-flakies.py | 18 --
 1 file changed, 12 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2eced6f0/dev-support/report-flakies.py
--
diff --git a/dev-support/report-flakies.py b/dev-support/report-flakies.py
index 65faa7c..e5e66cc 100755
--- a/dev-support/report-flakies.py
+++ b/dev-support/report-flakies.py
@@ -99,7 +99,8 @@ for job_url in jobs_list:
 bad_tests = set()
 for build in build_id_to_results:
 for test in build_id_to_results[build]:
-if build_id_to_results[build][test] == "REGRESSION":
+if (build_id_to_results[build][test] == "REGRESSION"
+or build_id_to_results[build][test] == "FAILED"):
 bad_tests.add(test)
 global_bad_tests.add(test)
 
@@ -123,7 +124,7 @@ for job_url in jobs_list:
 print "{:>100}  {:6}  {:10}  {:2.0f}%".format(bad_test, fail, 
total, fail*100.0/total)
 else:
 print "No flaky tests founds."
-if len(builds_ids) == len(build_ids_without_result):
+if len(build_ids) == len(build_ids_without_result):
 print "None of the analyzed builds have test result."
 
 print "Builds analyzed: " + str(build_ids)
@@ -131,16 +132,21 @@ for job_url in jobs_list:
 print ""
 
 if args.mvn:
-includes = ""
-excludes = ""
+# There might be multiple tests failing within each TestCase, avoid 
duplication of TestCase names.
+test_cases = set()
 for test in global_bad_tests:
 test = re.sub(".*\.", "", test)  # Remove package name prefix.
 test = re.sub("#.*", "", test)  # Remove individual unittest's name
-includes += test + ","
-excludes += "**/" + test + ".java,"
+test_cases.add(test)
+
+includes = ",".join(test_cases)
 with open("./includes", "w") as inc_file:
 inc_file.write(includes)
 inc_file.close()
+
+excludes = ""
+for test_case in test_cases:
+excludes += "**/" + test_case + ".java,"
 with open("./excludes", "w") as exc_file:
 exc_file.write(excludes)
 exc_file.close()



[3/3] hbase git commit: HBASE-15740 Replication source.shippedKBs metric is undercounting because it is in KB

2016-05-09 Thread enis
HBASE-15740 Replication source.shippedKBs metric is undercounting because it is 
in KB


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4c214b50
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4c214b50
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4c214b50

Branch: refs/heads/branch-1.3
Commit: 4c214b50c6efae02c0eb054b5ddf514469776a9f
Parents: 4ef5b4e
Author: Enis Soztutar 
Authored: Mon May 9 10:25:49 2016 -0700
Committer: Enis Soztutar 
Committed: Mon May 9 10:27:23 2016 -0700

--
 .../MetricsReplicationSourceSource.java |  5 +++-
 .../MetricsReplicationGlobalSourceSource.java   | 25 ++--
 .../MetricsReplicationSourceSourceImpl.java | 13 --
 .../replication/regionserver/MetricsSource.java | 10 
 .../regionserver/ReplicationSource.java |  2 +-
 .../DummyRegionServerEndpointProtos.java| 21 
 6 files changed, 54 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4c214b50/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
index 3aa01ab..271f0ac 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
@@ -24,7 +24,10 @@ public interface MetricsReplicationSourceSource {
   public static final String SOURCE_AGE_OF_LAST_SHIPPED_OP = 
"source.ageOfLastShippedOp";
   public static final String SOURCE_SHIPPED_BATCHES = "source.shippedBatches";
 
+  @Deprecated
+  /** @deprecated Use SOURCE_SHIPPED_BYTES instead */
   public static final String SOURCE_SHIPPED_KBS = "source.shippedKBs";
+  public static final String SOURCE_SHIPPED_BYTES = "source.shippedBytes";
   public static final String SOURCE_SHIPPED_OPS = "source.shippedOps";
 
   public static final String SOURCE_LOG_READ_IN_BYTES = 
"source.logReadInBytes";
@@ -41,7 +44,7 @@ public interface MetricsReplicationSourceSource {
   void incrLogEditsFiltered(long size);
   void incrBatchesShipped(int batches);
   void incrOpsShipped(long ops);
-  void incrShippedKBs(long size);
+  void incrShippedBytes(long size);
   void incrLogReadInBytes(long size);
   void incrLogReadInEdits(long size);
   void clear();

http://git-wip-us.apache.org/repos/asf/hbase/blob/4c214b50/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
index 2526f32..476d2f7 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
@@ -30,6 +30,8 @@ public class MetricsReplicationGlobalSourceSource implements 
MetricsReplicationS
   private final MutableFastCounter logEditsFilteredCounter;
   private final MutableFastCounter shippedBatchesCounter;
   private final MutableFastCounter shippedOpsCounter;
+  private final MutableFastCounter shippedBytesCounter;
+  @Deprecated
   private final MutableFastCounter shippedKBsCounter;
   private final MutableFastCounter logReadInBytesCounter;
   private final MutableFastCounter shippedHFilesCounter;
@@ -48,6 +50,8 @@ public class MetricsReplicationGlobalSourceSource implements 
MetricsReplicationS
 
 shippedKBsCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_SHIPPED_KBS, 0L);
 
+shippedBytesCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_SHIPPED_BYTES, 0L);
+
 logReadInBytesCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_LOG_READ_IN_BYTES, 0L);
 
 logReadInEditsCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_LOG_READ_IN_EDITS, 0L);
@@ -88,8 +92,25 @@ public class MetricsReplicationGlobalSourceSource implements 
MetricsReplicationS
 shippedOpsCounter.incr(ops);
   }
 
-  @Override public void incrShippedKBs(long size) {
-
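
The hunk above is cut off in the archive, but the shape of the fix is visible in the diffstat: a source.shippedBytes counter is added alongside the now-deprecated source.shippedKBs one. A hypothetical Java sketch (not the actual HBase code, where the shipped size is presumably reduced to KB by integer division before being counted) of why KB-granularity accumulation undercounts:

public class ShippedSizeSketch {
  public static void main(String[] args) {
    long shippedKBs = 0;
    long shippedBytes = 0;
    long[] batchSizes = {300, 800, 2048, 512};  // bytes per shipped batch

    for (long size : batchSizes) {
      shippedKBs += size / 1024;   // integer division: 300, 800 and 512 count as 0
      shippedBytes += size;        // every byte is accounted for
    }

    System.out.println("shippedKBs   = " + shippedKBs);    // prints 2
    System.out.println("shippedBytes = " + shippedBytes);  // prints 3660
  }
}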

[2/3] hbase git commit: HBASE-15740 Replication source.shippedKBs metric is undercounting because it is in KB

2016-05-09 Thread enis
HBASE-15740 Replication source.shippedKBs metric is undercounting because it is 
in KB


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d07d3161
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d07d3161
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d07d3161

Branch: refs/heads/branch-1
Commit: d07d31611395ae4e0befc04a0b8cc67adcacacb9
Parents: 137d891
Author: Enis Soztutar 
Authored: Mon May 9 10:25:49 2016 -0700
Committer: Enis Soztutar 
Committed: Mon May 9 10:25:57 2016 -0700

--
 .../MetricsReplicationSourceSource.java |  5 +++-
 .../MetricsReplicationGlobalSourceSource.java   | 25 ++--
 .../MetricsReplicationSourceSourceImpl.java | 13 --
 .../replication/regionserver/MetricsSource.java | 10 
 .../regionserver/ReplicationSource.java |  2 +-
 .../DummyRegionServerEndpointProtos.java| 21 
 6 files changed, 54 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d07d3161/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
index 3aa01ab..271f0ac 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
@@ -24,7 +24,10 @@ public interface MetricsReplicationSourceSource {
   public static final String SOURCE_AGE_OF_LAST_SHIPPED_OP = 
"source.ageOfLastShippedOp";
   public static final String SOURCE_SHIPPED_BATCHES = "source.shippedBatches";
 
+  @Deprecated
+  /** @deprecated Use SOURCE_SHIPPED_BYTES instead */
   public static final String SOURCE_SHIPPED_KBS = "source.shippedKBs";
+  public static final String SOURCE_SHIPPED_BYTES = "source.shippedBytes";
   public static final String SOURCE_SHIPPED_OPS = "source.shippedOps";
 
   public static final String SOURCE_LOG_READ_IN_BYTES = 
"source.logReadInBytes";
@@ -41,7 +44,7 @@ public interface MetricsReplicationSourceSource {
   void incrLogEditsFiltered(long size);
   void incrBatchesShipped(int batches);
   void incrOpsShipped(long ops);
-  void incrShippedKBs(long size);
+  void incrShippedBytes(long size);
   void incrLogReadInBytes(long size);
   void incrLogReadInEdits(long size);
   void clear();

http://git-wip-us.apache.org/repos/asf/hbase/blob/d07d3161/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
index 2526f32..476d2f7 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
@@ -30,6 +30,8 @@ public class MetricsReplicationGlobalSourceSource implements 
MetricsReplicationS
   private final MutableFastCounter logEditsFilteredCounter;
   private final MutableFastCounter shippedBatchesCounter;
   private final MutableFastCounter shippedOpsCounter;
+  private final MutableFastCounter shippedBytesCounter;
+  @Deprecated
   private final MutableFastCounter shippedKBsCounter;
   private final MutableFastCounter logReadInBytesCounter;
   private final MutableFastCounter shippedHFilesCounter;
@@ -48,6 +50,8 @@ public class MetricsReplicationGlobalSourceSource implements 
MetricsReplicationS
 
 shippedKBsCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_SHIPPED_KBS, 0L);
 
+shippedBytesCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_SHIPPED_BYTES, 0L);
+
 logReadInBytesCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_LOG_READ_IN_BYTES, 0L);
 
 logReadInEditsCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_LOG_READ_IN_EDITS, 0L);
@@ -88,8 +92,25 @@ public class MetricsReplicationGlobalSourceSource implements 
MetricsReplicationS
 shippedOpsCounter.incr(ops);
   }
 
-  @Override public void incrShippedKBs(long size) {
-

[1/3] hbase git commit: HBASE-15740 Replication source.shippedKBs metric is undercounting because it is in KB

2016-05-09 Thread enis
Repository: hbase
Updated Branches:
  refs/heads/branch-1 137d891fb -> d07d31611
  refs/heads/branch-1.3 4ef5b4e15 -> 4c214b50c
  refs/heads/master 541d1da5f -> b75b22680


HBASE-15740 Replication source.shippedKBs metric is undercounting because it is 
in KB


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b75b2268
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b75b2268
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b75b2268

Branch: refs/heads/master
Commit: b75b2268046d8b3e872060505260289d8ee43dd5
Parents: 541d1da
Author: Enis Soztutar 
Authored: Mon May 9 10:25:49 2016 -0700
Committer: Enis Soztutar 
Committed: Mon May 9 10:25:49 2016 -0700

--
 .../MetricsReplicationSourceSource.java |  5 +++-
 .../MetricsReplicationGlobalSourceSource.java   | 25 ++--
 .../MetricsReplicationSourceSourceImpl.java | 13 --
 .../replication/regionserver/MetricsSource.java | 10 
 .../regionserver/ReplicationSource.java |  2 +-
 .../DummyRegionServerEndpointProtos.java| 21 
 6 files changed, 54 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b75b2268/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
index 3aa01ab..271f0ac 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSourceSource.java
@@ -24,7 +24,10 @@ public interface MetricsReplicationSourceSource {
   public static final String SOURCE_AGE_OF_LAST_SHIPPED_OP = 
"source.ageOfLastShippedOp";
   public static final String SOURCE_SHIPPED_BATCHES = "source.shippedBatches";
 
+  @Deprecated
+  /** @deprecated Use SOURCE_SHIPPED_BYTES instead */
   public static final String SOURCE_SHIPPED_KBS = "source.shippedKBs";
+  public static final String SOURCE_SHIPPED_BYTES = "source.shippedBytes";
   public static final String SOURCE_SHIPPED_OPS = "source.shippedOps";
 
   public static final String SOURCE_LOG_READ_IN_BYTES = 
"source.logReadInBytes";
@@ -41,7 +44,7 @@ public interface MetricsReplicationSourceSource {
   void incrLogEditsFiltered(long size);
   void incrBatchesShipped(int batches);
   void incrOpsShipped(long ops);
-  void incrShippedKBs(long size);
+  void incrShippedBytes(long size);
   void incrLogReadInBytes(long size);
   void incrLogReadInEdits(long size);
   void clear();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b75b2268/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
index 2526f32..476d2f7 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationGlobalSourceSource.java
@@ -30,6 +30,8 @@ public class MetricsReplicationGlobalSourceSource implements 
MetricsReplicationS
   private final MutableFastCounter logEditsFilteredCounter;
   private final MutableFastCounter shippedBatchesCounter;
   private final MutableFastCounter shippedOpsCounter;
+  private final MutableFastCounter shippedBytesCounter;
+  @Deprecated
   private final MutableFastCounter shippedKBsCounter;
   private final MutableFastCounter logReadInBytesCounter;
   private final MutableFastCounter shippedHFilesCounter;
@@ -48,6 +50,8 @@ public class MetricsReplicationGlobalSourceSource implements 
MetricsReplicationS
 
 shippedKBsCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_SHIPPED_KBS, 0L);
 
+shippedBytesCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_SHIPPED_BYTES, 0L);
+
 logReadInBytesCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_LOG_READ_IN_BYTES, 0L);
 
 logReadInEditsCounter = 
rms.getMetricsRegistry().getCounter(SOURCE_LOG_READ_IN_EDITS, 0L);
@@ -88,8 +92,25 @@ public class 

[45/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty

[07/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty

[20/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty

[19/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty

[01/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 28d625a2a -> 33c287c2e



[17/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty

[23/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty

[34/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty

[09/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
index 0de9040..86696e0 100644
[Generated Javadoc diff: the "Uses of DeserializationException" tables are re-sorted. In the first hunk the parseFrom(byte[]) rows for HRegionInfo, HTableDescriptor, ClusterId, HColumnDescriptor, TableDescriptor and SplitLogTask change position; in the second hunk the filter/comparator parseFrom(byte[] pbBytes) rows (SingleColumnValueFilter, QualifierFilter, LongComparator, FamilyFilter, DependentColumnFilter, PrefixFilter, RegexStringComparator, WhileMatchFilter, InclusiveStopFilter, FirstKeyOnlyFilter, TimestampsFilter, KeyOnlyFilter, FuzzyRowFilter, RandomRowFilter, MultipleColumnPrefixFilter, FilterList, ColumnRangeFilter and others) are reshuffled. Only ordering and anchor numbers change; no signatures change.]
[39/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index bade270..5cd4850 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
[Generated master-index diff: index-all.html gains entries for the new client class AbstractRegionServerCallable<T> ("Implementations call a RegionServer.") and its constructor; AsyncRpcChannel becomes an interface ("Interface for Async Rpc Channels") backed by the new AsyncRpcChannelImpl ("Netty RPC channel") and AsyncRpcChannelImpl.CallWriteListener; AsyncCall is now parameterised as AsyncCall<M extends Message, T> with a new constructor signature; AsyncServerResponseHandler's constructor now takes an AsyncRpcChannelImpl; callBlockingMethod moves from CoprocessorRpcChannel to SyncCoprocessorRpcChannel; and field entries such as address, authMethod and buildUserInfo move from AsyncRpcChannel to AsyncRpcChannelImpl.]
hbase git commit: HBASE-15791 Improve javadoc around ScheduledChore

2016-05-09 Thread jmhsieh
Repository: hbase
Updated Branches:
  refs/heads/branch-1.1 239b80456 -> 1b4e4ea62


HBASE-15791 Improve javadoc around ScheduledChore


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1b4e4ea6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1b4e4ea6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1b4e4ea6

Branch: refs/heads/branch-1.1
Commit: 1b4e4ea62c2ac9c9690a596a9f3bfd27d65a24f9
Parents: 239b804
Author: Jonathan M Hsieh 
Authored: Sat May 7 15:33:20 2016 -0700
Committer: Jonathan M Hsieh 
Committed: Mon May 9 10:00:01 2016 -0700

--
 .../org/apache/hadoop/hbase/ScheduledChore.java | 37 +---
 1 file changed, 25 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1b4e4ea6/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
index 6f49342..71326bf 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
@@ -55,9 +55,9 @@ public abstract class ScheduledChore implements Runnable {
   /**
    * Scheduling parameters. Used by ChoreService when scheduling the chore to run periodically
    */
-  private final int period;
+  private final int period; // in TimeUnit units
   private final TimeUnit timeUnit;
-  private final long initialDelay;
+  private final long initialDelay; // in TimeUnit units
 
   /**
    * Interface to the ChoreService that this ScheduledChore is scheduled with. null if the chore is
@@ -68,8 +68,8 @@ public abstract class ScheduledChore implements Runnable {
   /**
    * Variables that encapsulate the meaningful state information
    */
-  private long timeOfLastRun = -1;
-  private long timeOfThisRun = -1;
+  private long timeOfLastRun = -1; // system time millis
+  private long timeOfThisRun = -1; // system time millis
   private boolean initialChoreComplete = false;
 
   /**
@@ -127,7 +127,7 @@ public abstract class ScheduledChore implements Runnable {
   /**
    * @param name Name assigned to Chore. Useful for identification amongst chores of the same type
    * @param stopper When {@link Stoppable#isStopped()} is true, this chore will cancel and cleanup
-   * @param period Period with which this Chore repeats execution when scheduled.
+   * @param period Period in millis with which this Chore repeats execution when scheduled.
    */
   public ScheduledChore(final String name, Stoppable stopper, final int period) {
     this(name, stopper, period, DEFAULT_INITIAL_DELAY);
@@ -136,7 +136,7 @@ public abstract class ScheduledChore implements Runnable {
   /**
    * @param name Name assigned to Chore. Useful for identification amongst chores of the same type
    * @param stopper When {@link Stoppable#isStopped()} is true, this chore will cancel and cleanup
-   * @param period Period with which this Chore repeats execution when scheduled.
+   * @param period Period in millis with which this Chore repeats execution when scheduled.
    * @param initialDelay Delay before this Chore begins to execute once it has been scheduled. A
    *  value of 0 means the chore will begin to execute immediately. Negative delays are
    *  invalid and will be corrected to a value of 0.
@@ -149,10 +149,10 @@ public abstract class ScheduledChore implements Runnable {
   /**
    * @param name Name assigned to Chore. Useful for identification amongst chores of the same type
    * @param stopper When {@link Stoppable#isStopped()} is true, this chore will cancel and cleanup
-   * @param period Period with which this Chore repeats execution when scheduled.
-   * @param initialDelay Delay before this Chore begins to execute once it has been scheduled. A
-   *  value of 0 means the chore will begin to execute immediately. Negative delays are
-   *  invalid and will be corrected to a value of 0.
+   * @param period Period in Timeunit unit with which this Chore repeats execution when scheduled.
+   * @param initialDelay Delay in Timeunit unit before this Chore begins to execute once it has been
+   *  scheduled. A value of 0 means the chore will begin to execute immediately. Negative
+   *  delays are invalid and will be corrected to a value of 0.
    * @param unit The unit that is used to measure period and initialDelay
    */
   public ScheduledChore(final String name, Stoppable stopper, final int period,
@@ -213,8 +213,8 @@ public abstract class ScheduledChore implements Runnable {
   }
 
   /**
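For context, a minimal sketch of a chore scheduled with an explicit TimeUnit, matching the clarified javadoc above (period and initialDelay are interpreted in the supplied unit); the chore name, the timings and the trivial Stoppable below are illustrative:

  import java.util.concurrent.TimeUnit;
  import org.apache.hadoop.hbase.ChoreService;
  import org.apache.hadoop.hbase.ScheduledChore;
  import org.apache.hadoop.hbase.Stoppable;

  public class ChoreExample {
    // Trivial Stoppable so the chore has something to consult.
    static final class SimpleStopper implements Stoppable {
      private volatile boolean stopped = false;
      @Override public void stop(String why) { stopped = true; }
      @Override public boolean isStopped() { return stopped; }
    }

    public static void main(String[] args) throws Exception {
      ChoreService service = new ChoreService("example");
      SimpleStopper stopper = new SimpleStopper();
      // period=5 and initialDelay=1 are both in TimeUnit.SECONDS, per the clarified javadoc.
      ScheduledChore chore = new ScheduledChore("example-chore", stopper, 5, 1, TimeUnit.SECONDS) {
        @Override
        protected void chore() {
          System.out.println("chore ran");
        }
      };
      service.scheduleChore(chore);
      TimeUnit.SECONDS.sleep(12);   // let it fire a couple of times
      stopper.stop("done");
      service.shutdown();
    }
  }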

hbase git commit: HBASE-15791 Improve javadoc around ScheduledChore

2016-05-09 Thread jmhsieh
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 8a20ba049 -> 4561e6c6c


HBASE-15791 Improve javadoc around ScheduledChore


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4561e6c6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4561e6c6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4561e6c6

Branch: refs/heads/branch-1.2
Commit: 4561e6c6ca7d9cb1e2d859d2eeabc67fef1079c8
Parents: 8a20ba0
Author: Jonathan M Hsieh 
Authored: Sat May 7 15:33:20 2016 -0700
Committer: Jonathan M Hsieh 
Committed: Mon May 9 09:58:44 2016 -0700

--
 .../org/apache/hadoop/hbase/ScheduledChore.java | 37 +---
 1 file changed, 25 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4561e6c6/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
index 2c54b9f..5c5bcd8 100644
[Hunks identical to the branch-1.1 patch above.]

hbase git commit: HBASE-15791 Improve javadoc around ScheduledChore

2016-05-09 Thread jmhsieh
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 fdf117ec6 -> 4ef5b4e15


HBASE-15791 Improve javadoc around ScheduledChore


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4ef5b4e1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4ef5b4e1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4ef5b4e1

Branch: refs/heads/branch-1.3
Commit: 4ef5b4e15099f9a05b72ef23ca1512e230022d9b
Parents: fdf117e
Author: Jonathan M Hsieh 
Authored: Sat May 7 15:33:20 2016 -0700
Committer: Jonathan M Hsieh 
Committed: Mon May 9 09:57:59 2016 -0700

--
 .../org/apache/hadoop/hbase/ScheduledChore.java | 37 +---
 1 file changed, 25 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4ef5b4e1/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
index 2c54b9f..5c5bcd8 100644
[Hunks identical to the branch-1.1 patch above.]

hbase git commit: HBASE-15791 Improve javadoc around ScheduledChore

2016-05-09 Thread jmhsieh
Repository: hbase
Updated Branches:
  refs/heads/branch-1 94c4d568b -> 137d891fb


HBASE-15791 Improve javadoc around ScheduledChore


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/137d891f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/137d891f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/137d891f

Branch: refs/heads/branch-1
Commit: 137d891fbc241b7866bff5476bf8d195d3a9812f
Parents: 94c4d56
Author: Jonathan M Hsieh 
Authored: Sat May 7 15:33:20 2016 -0700
Committer: Jonathan M Hsieh 
Committed: Mon May 9 09:57:31 2016 -0700

--
 .../org/apache/hadoop/hbase/ScheduledChore.java | 37 +---
 1 file changed, 25 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/137d891f/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
index 2c54b9f..5c5bcd8 100644
[Hunks identical to the branch-1.1 patch above.]

[49/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/apidocs/org/apache/hadoop/hbase/client/Consistency.html
--
[Generated API-docs diff across three files: apidocs/org/apache/hadoop/hbase/client/Consistency.html (the values() and valueOf(String) anchors are renumbered, the enum javadoc text is unchanged); apidocs/org/apache/hadoop/hbase/client/Durability.html (the Prev Class/Next Class navigation links are updated for the new neighbouring page); and a new file apidocs/org/apache/hadoop/hbase/client/Future.html documenting the added public interface Future<V> ("Promise for responses"), annotated @InterfaceAudience.Public and @InterfaceStability.Evolving, which extends io.netty.util.concurrent.Future<V> and inherits its listener/await methods plus java.util.concurrent.Future's get, isCancelled and isDone.]
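Since the regenerated pages above cover the client Consistency enum, a brief, hedged sketch of its usual use for timeline-consistent reads; the table name and row key are illustrative:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;
  import org.apache.hadoop.hbase.client.Consistency;
  import org.apache.hadoop.hbase.client.Get;
  import org.apache.hadoop.hbase.client.Result;
  import org.apache.hadoop.hbase.client.Table;
  import org.apache.hadoop.hbase.util.Bytes;

  public class TimelineGetExample {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      try (Connection connection = ConnectionFactory.createConnection(conf);
           Table table = connection.getTable(TableName.valueOf("t1"))) {
        Get get = new Get(Bytes.toBytes("row-1"));
        get.setConsistency(Consistency.TIMELINE);  // allow the read to be served by a replica
        Result result = table.get(get);
        System.out.println("stale=" + result.isStale());
      }
    }
  }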

[14/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/client/package-use.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-use.html b/devapidocs/org/apache/hadoop/hbase/client/package-use.html
index f369f4b..ba2c9c4 100644
[Generated Javadoc diff: a row for the new AbstractRegionServerCallable ("Implementations call a RegionServer.") is inserted into the package-use table and the remaining class rows (Action, Admin, Append, AsyncProcess, BufferedMutator, ClusterConnection, Connection, ConnectionImplementation, Consistency, Delete, Durability, Get, HBaseAdmin, HConnection, HRegionLocator, HTableInterface, HTableMultiplexer and the rest) are simply renumbered; their descriptions are unchanged.]

[43/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
index eaae149..18172d6 100644
[Generated source-view diff for TableInputFormat: the Scan construction previously inlined in setConf(Configuration) is extracted into a new public static Scan createScanFromConfiguration(Configuration) throws IOException, which applies SCAN_ROW_START, SCAN_ROW_STOP, SCAN_COLUMNS, SCAN_COLUMN_FAMILY, SCAN_TIMESTAMP, SCAN_TIMERANGE_START/SCAN_TIMERANGE_END, SCAN_MAXVERSIONS, SCAN_CACHEDROWS and SCAN_BATCHSIZE, and leaves block caching off by default (SCAN_CACHEBLOCKS, false) because full table scans generate too much block-cache churn. setConf() now calls the helper and logs any exception; initialize(JobContext) and the addColumn/addColumns helpers are only renumbered.]
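A hedged sketch of driving that new helper purely from configuration; only the SCAN_* keys shown in the hunk are assumed, and the property values (and the "t1" table) are illustrative:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.mapreduce.TableInputFormat;

  public class ScanFromConfExample {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      conf.set(TableInputFormat.INPUT_TABLE, "t1");           // table the enclosing job would read
      conf.set(TableInputFormat.SCAN_COLUMN_FAMILY, "cf");    // restrict to one family
      conf.set(TableInputFormat.SCAN_ROW_START, "row-000");   // parsed with Bytes.toBytesBinary
      conf.set(TableInputFormat.SCAN_ROW_STOP, "row-999");
      conf.setInt(TableInputFormat.SCAN_CACHEDROWS, 500);     // scanner caching
      conf.setBoolean(TableInputFormat.SCAN_CACHEBLOCKS, false);

      // New in this change: build the Scan directly from those properties.
      Scan scan = TableInputFormat.createScanFromConfiguration(conf);
      System.out.println("caching=" + scan.getCaching());
    }
  }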

[27/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/class-use/SplitLogTask.html
--
[Generated class-use diffs: SplitLogTask.html swaps the order of the endTask(SplitLogTask, AtomicLong, SplitTaskDetails) rows for ZkSplitLogWorkerCoordination and SplitLogWorkerCoordination; TableDescriptor.html drops the org.apache.hadoop.hbase.master.handler section (TableEventHandler.getTableDescriptor()); TableDescriptors.html swaps the HMaster and MasterServices getTableDescriptors() rows; and TableExistsException.html re-sorts the Admin/HBaseAdmin cloneSnapshot(snapshotName, TableName) rows. Only ordering and anchor numbers change.]

[15/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/client/package-frame.html
--
[Generated Javadoc diffs for devapidocs/org/apache/hadoop/hbase/client/package-frame.html and package-summary.html: the interface lists gain Future<V> ("Promise for responses"), ResponseFutureListener<V> ("Specific interface for the Response future listener") and RetryingCallableBase ("All generic methods for a Callable that can be retried"), the class list gains AbstractRegionServerCallable<T> ("Implementations call a RegionServer."), and the remaining interface/class rows are renumbered with their descriptions unchanged.]

[25/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
--
[Generated class-use diffs: TableNotDisabledException.html and TableNotFoundException.html swap the checkTableModifiable(TableName) rows for HMaster and MasterServices; Tag.html swaps the createVisibilityExpTags(...) rows between DefaultVisibilityExpressionResolver and VisibilityExpressionResolver, and between DefaultVisibilityLabelServiceImpl and VisibilityLabelService. Only ordering and anchor numbers change.]

[42/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 94b..f75f74f 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
[Generated Checkstyle report diff elided: the file count rises from 1756 to 1763 and the
total error count drops from 12365 to 12214. Visible per-file changes in this hunk:
org/apache/hadoop/hbase/client/MultiServerCallable.java goes from 8 to 7 errors and the row
for org/apache/hadoop/hbase/client/RetryingCallable.java (6 errors) is removed. The rest of
the hunk is renumbering of the generated per-file HTML table rows.]
 

[21/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/client/ResponseFutureListener.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/ResponseFutureListener.html b/devapidocs/org/apache/hadoop/hbase/client/ResponseFutureListener.html
new file mode 100644
index 000..3a79100
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/client/ResponseFutureListener.html
[New generated Javadoc page; navigation boilerplate elided. It documents the
@InterfaceAudience.Private interface org.apache.hadoop.hbase.client.ResponseFutureListener<V>
("Specific interface for the Response future listener"), which extends
io.netty.util.concurrent.GenericFutureListener<Future<V>> and only inherits operationComplete
from it. A hypothetical implementation sketch follows after this commit's file summaries.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/client/Result.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Result.html b/devapidocs/org/apache/hadoop/hbase/client/Result.html
index 46db5ed..1038140 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Result.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Result.html
[Only the generated "Prev Class"/"Next Class" navigation links change on this page.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/client/RetryingCallable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/RetryingCallable.html b/devapidocs/org/apache/hadoop/hbase/client/RetryingCallable.html
index 7593bd8..9029ed6 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/RetryingCallable.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/RetryingCallable.html
[RetryingCallable<T> is now documented with RetryingCallableBase as a superinterface, and
AbstractRegionServerCallable joins the list of known implementing classes; the navigation
links shift accordingly. Generated table markup elided.]
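For orientation, here is a minimal, hypothetical sketch of what an implementation of the
ResponseFutureListener<V> interface summarized above could look like. It is not part of this
site commit; it relies only on the shape shown in the generated page (the interface extends
netty's GenericFutureListener<Future<V>>, so operationComplete is the single callback), and
because the interface is @InterfaceAudience.Private, application code would not normally
implement it.

    import io.netty.util.concurrent.Future;
    import org.apache.hadoop.hbase.client.ResponseFutureListener;

    // Hypothetical listener that logs the outcome of an async HBase call.
    public class LoggingResponseListener<V> implements ResponseFutureListener<V> {
      // operationComplete is inherited from GenericFutureListener.
      @Override
      public void operationComplete(Future<V> future) throws Exception {
        if (future.isSuccess()) {
          System.out.println("call succeeded: " + future.getNow());
        } else {
          System.err.println("call failed: " + future.cause());
        }
      }
    }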
 
 
 
 

[13/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogWorkerCoordination.SplitTaskDetails.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogWorkerCoordination.SplitTaskDetails.html b/devapidocs/org/apache/hadoop/hbase/coordination/class-use/SplitLogWorkerCoordination.SplitTaskDetails.html
index ff895bf..5343b6b 100644
[Generated class-use diff elided: the endTask(SplitLogTask, AtomicLong, SplitTaskDetails) rows
for SplitLogWorkerCoordination ("Notify coordination engine that splitting task has completed")
and ZkSplitLogWorkerCoordination ("endTask() can fail and the only way to recover out of it is
for the SplitLogManager to timeout the task node") swap places in the table.]
 
 
 



[50/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index a5333e3..9ed6623 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
[Generated class-use diff elided: the rows for Put.add(Cell), Append.add(Cell),
Increment.add(Cell), the setFamilyCellMap(NavigableMap<byte[], List<Cell>>) overrides of
Put/Append/Increment/Delete/Mutation, and the getNextCellHint(Cell) implementations of the
various filters are reordered. In the portion shown only row ordering changes; the diff is
truncated in this digest.]
+abstract Cell

[24/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Evolving.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Evolving.html b/devapidocs/org/apache/hadoop/hbase/classification/class-use/InterfaceStability.Evolving.html
index 4bae063..044d5a9 100644
[Generated class-use diff elided. Notable entries: a new interface Future<V> ("Promise for
responses") appears among the client classes; CoprocessorRpcChannel is now documented as an
interface ("Base interface which provides clients with an RPC connection to call coprocessor
endpoint Services") instead of a class; and a new class SyncCoprocessorRpcChannel ("Base class
which provides clients with an RPC connection to call coprocessor endpoint Services") is
listed. The remaining rows only shift position.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
index 74eec57..8d86308 100644
--- a/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/classification/package-tree.html
[Only the ordering of the annotation-type hierarchy entries changes; the diff is truncated in
this digest.]

[08/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index fe37f30..5e1dc46 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
[Generated class-use diff elided: the filterColumn(Cell) and filterKeyValue(Cell) rows for the
built-in filters (ColumnPrefixFilter, MultipleColumnPrefixFilter, DependentColumnFilter,
QualifierFilter, FilterList, FilterWrapper, the VisibilityLabelFilter/VisibilityController
entries, and so on) are reordered. The abstract contract is unchanged: Filter.filterKeyValue(Cell)
remains "A way to filter based on the column family, column qualifier and/or the column value."
A sketch of a custom filter built on this hook follows below.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 5556625..435b828 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
[Only the ordering of the Scan/Get/Query setFilter(Filter) rows changes; the diff is truncated
in this digest.]
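Since Filter.filterKeyValue(Cell) is the extension point these generated tables keep
reshuffling, here is a small, hypothetical sketch of a custom filter built on it. It is not
part of this site commit; it assumes the usual FilterBase convenience base class and the
CellUtil/Bytes helpers, and a real deployment would also need the filter class available on
the region servers' classpath.

    import org.apache.hadoop.hbase.Cell;
    import org.apache.hadoop.hbase.CellUtil;
    import org.apache.hadoop.hbase.filter.FilterBase;
    import org.apache.hadoop.hbase.util.Bytes;

    // Keeps only cells from column family "cf"; everything else is skipped.
    public class FamilyOnlyFilter extends FilterBase {
      private static final byte[] FAMILY = Bytes.toBytes("cf");

      @Override
      public ReturnCode filterKeyValue(Cell v) {
        // INCLUDE keeps the cell, SKIP drops it and moves on to the next one.
        return CellUtil.matchingFamily(v, FAMILY) ? ReturnCode.INCLUDE : ReturnCode.SKIP;
      }
    }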

[16/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallerInterceptorContext.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallerInterceptorContext.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/RetryingCallerInterceptorContext.html
index 49803de..24c6e4f 100644
[Generated class-use diff elided. The substantive change: the prepare(...) methods of
RetryingCallerInterceptorContext and NoOpRetryingInterceptorContext are now documented as
taking a RetryingCallableBase instead of a RetryingCallable<?>. The
createEmptyContext/handleFailure/intercept/updateFailureInfo rows for
PreemptiveFastFailInterceptor and NoOpRetryableCallerInterceptor merely swap order.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
index 64ca784..8b6492f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
[Only the ordering of the compareTo(Row) rows for Get, Mutation and RowMutations changes.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/client/class-use/RpcRetryingCaller.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/RpcRetryingCaller.html b/devapidocs/org/apache/hadoop/hbase/client/class-use/RpcRetryingCaller.html
index 6974578..461616b 100644
[Diff truncated in this digest.]
--- 

[36/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
index cff4b22..ca75e0b 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
[Generated class-use diff elided: the compareKey/createSeeker rows for the data-block encoders,
the comparator fields and getComparator() rows across the io.hfile and regionserver packages,
and the createComponents(Configuration, Store, CellComparator) rows of the store engines are
reordered; the diff is truncated in this digest.]
 protected abstract void
 

[38/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
index 8c07308..bbb672e 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
[Generated class-use diff elided: the abortable field rows for SimpleRpcScheduler and
RpcExecutor and the create(Configuration, PriorityFunction, Abortable) rows for
RpcSchedulerFactory and SimpleRpcSchedulerFactory swap order.]
+
 
 
 



[03/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/ipc/AsyncCall.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/AsyncCall.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/AsyncCall.html
index 9dd9fae..986d7a3 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/AsyncCall.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/AsyncCall.html
@@ -36,7 +36,7 @@
 
 
 Prev 
Class
-Next 
Class
+Next 
Class
 
 
 Frames
@@ -79,7 +79,7 @@
 
 
 org.apache.hadoop.hbase.ipc
-Class AsyncCall
+Class AsyncCallM extends 
com.google.protobuf.Message,T
 
 
 
@@ -89,10 +89,15 @@
 io.netty.util.concurrent.AbstractFutureV
 
 
-io.netty.util.concurrent.DefaultPromisecom.google.protobuf.Message
+io.netty.util.concurrent.DefaultPromiseT
 
 
-org.apache.hadoop.hbase.ipc.AsyncCall
+org.apache.hadoop.hbase.ipc.PromiseT
+
+
+org.apache.hadoop.hbase.ipc.AsyncCallM,T
+
+
 
 
 
@@ -103,15 +108,16 @@
 
 
 
+Type Parameters:T - 
Type of message returnedM - Message returned in 
communication to be converted
 
 All Implemented Interfaces:
-io.netty.util.concurrent.Promisecom.google.protobuf.Message, http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true;
 title="class or interface in 
java.util.concurrent">Futurecom.google.protobuf.Message
+io.netty.util.concurrent.PromiseT, http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/Future.html?is-external=true;
 title="class or interface in java.util.concurrent">FutureT
 
 
 
 @InterfaceAudience.Private
-public class AsyncCall
-extends 
io.netty.util.concurrent.DefaultPromisecom.google.protobuf.Message
+public class AsyncCallM
 extends com.google.protobuf.Message,T
+extends PromiseT
 Represents an Async Hbase call and its response.
 
  Responses are passed on to its given doneHandler and failures to the 
rpcController
@@ -138,8 +144,16 @@ extends 
io.netty.util.concurrent.DefaultPromisecom.google.protobuf.Message
 callStats
 
 
-(package private) PayloadCarryingRpcController
-controller
+private CellScanner
+cellScanner
+
+
+private AsyncRpcChannelImpl
+channel
+
+
+private IOExceptionConverter
+exceptionConverter
 
 
 (package private) int
@@ -150,13 +164,21 @@ extends 
io.netty.util.concurrent.DefaultPromisecom.google.protobuf.Message
 LOG
 
 
+private MessageConverterM,T
+messageConverter
+
+
 (package private) 
com.google.protobuf.Descriptors.MethodDescriptor
 method
 
-
+
 (package private) 
com.google.protobuf.Message
 param
 
+
+private int
+priority
+
 
 (package private) 
com.google.protobuf.Message
 responseDefaultType
@@ -184,12 +206,16 @@ extends 
io.netty.util.concurrent.DefaultPromisecom.google.protobuf.Message
 Constructor and Description
 
 
-AsyncCall(io.netty.channel.EventLoopeventLoop,
+AsyncCall(AsyncRpcChannelImplchannel,
   intconnectId,
   com.google.protobuf.Descriptors.MethodDescriptormd,
   com.google.protobuf.Messageparam,
-  PayloadCarryingRpcControllercontroller,
-  com.google.protobuf.MessageresponseDefaultType,
+  CellScannercellScanner,
+  MresponseDefaultType,
+  MessageConverterM,TmessageConverter,
+  IOExceptionConverterexceptionConverter,
+  longrpcTimeout,
+  intpriority,
   MetricsConnection.CallStatscallStats)
 Constructor
 
@@ -210,31 +236,45 @@ extends 
io.netty.util.concurrent.DefaultPromisecom.google.protobuf.Message
 Method and Description
 
 
+boolean
+cancel(booleanmayInterupt)
+
+
+CellScanner
+cellScanner()
+Get the cellScanner for this request.
+
+
+
+int
+getPriority()
+
+
 long
 getRpcTimeout()
 Get the rpc timeout
 
 
-
+
 long
 getStartTime()
 Get the start time
 
 
-
+
 void
 setFailed(http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOExceptionexception)
 Set failed
 
 
-
+
 void
-setSuccess(com.google.protobuf.Messagevalue,
+setSuccess(Mvalue,
 CellScannercellBlockScanner)
 Set success with a cellBlockScanner
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 
@@ -244,7 +284,7 @@ extends 
io.netty.util.concurrent.DefaultPromisecom.google.protobuf.Message
 
 
 Methods inherited from 
classio.netty.util.concurrent.DefaultPromise
-addListener, addListeners, await, await, await, awaitUninterruptibly, 
awaitUninterruptibly, awaitUninterruptibly, cancel, cause, checkDeadLock, 
executor, getNow, isCancellable, isCancelled, isDone, isSuccess, 
notifyListener, removeListener, removeListeners, setFailure, setSuccess, 
setUncancellable, sync, syncUninterruptibly, toStringBuilder, tryFailure, 
trySuccess
+addListener, addListeners, await, await, await, awaitUninterruptibly, 
awaitUninterruptibly, 

[30/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
index 38c22d9..1689c47 100644
[Only the handler field rows for ZKInterProcessLockBase and ZKInterProcessReadWriteLock swap
order.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/class-use/InvalidFamilyOperationException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/InvalidFamilyOperationException.html b/devapidocs/org/apache/hadoop/hbase/class-use/InvalidFamilyOperationException.html
index 2ea0fb9..fc5e2ed 100644
[The page now reports "No usage of org.apache.hadoop.hbase.InvalidFamilyOperationException";
the previous entry for TableEventHandler.hasColumnFamily(HTableDescriptor, byte[]) in
org.apache.hadoop.hbase.master.handler is gone.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
index 4cec977..344c681 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
[The getCandidateFilesForRowKeyBefore(KeyValue) and updateCandidateFilesForRowKeyBefore(...)
rows for StoreFileManager, DefaultStoreFileManager and StripeStoreFileManager are reordered;
the diff is truncated in this digest.]
 title="class or interface in java.util">IteratorStoreFile
 

[18/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html
index b7b8005..51bc99b 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/MasterSwitchType.html
@@ -146,29 +146,29 @@ the order they are declared.
 
 
 boolean
-HBaseAdmin.isSplitOrMergeEnabled(MasterSwitchTypeswitchType)
-
-
-boolean
 Admin.isSplitOrMergeEnabled(MasterSwitchTypeswitchType)
 Query the current state of the switch
 
 
+
+boolean
+HBaseAdmin.isSplitOrMergeEnabled(MasterSwitchTypeswitchType)
+
 
 boolean[]
-HBaseAdmin.setSplitOrMergeEnabled(booleanenabled,
+Admin.setSplitOrMergeEnabled(booleanenabled,
 booleansynchronous,
 booleanskipLock,
-MasterSwitchType...switchTypes)
+MasterSwitchType...switchTypes)
+Turn the Split or Merge switches on or off.
+
 
 
 boolean[]
-Admin.setSplitOrMergeEnabled(booleanenabled,
+HBaseAdmin.setSplitOrMergeEnabled(booleanenabled,
 booleansynchronous,
 booleanskipLock,
-MasterSwitchType...switchTypes)
-Turn the Split or Merge switches on or off.
-
+MasterSwitchType...switchTypes)
 
 
 
@@ -186,43 +186,43 @@ the order they are declared.
 
 
 void
-BaseMasterAndRegionObserver.postSetSplitOrMergeEnabled(ObserverContextMasterCoprocessorEnvironmentctx,
+MasterObserver.postSetSplitOrMergeEnabled(ObserverContextMasterCoprocessorEnvironmentctx,
 booleannewValue,
-MasterSwitchTypeswitchType)
+MasterSwitchTypeswitchType)
+Called after setting split / merge switch
+
 
 
 void
-BaseMasterObserver.postSetSplitOrMergeEnabled(ObserverContextMasterCoprocessorEnvironmentctx,
+BaseMasterAndRegionObserver.postSetSplitOrMergeEnabled(ObserverContextMasterCoprocessorEnvironmentctx,
 booleannewValue,
 MasterSwitchTypeswitchType)
 
 
 void
-MasterObserver.postSetSplitOrMergeEnabled(ObserverContextMasterCoprocessorEnvironmentctx,
+BaseMasterObserver.postSetSplitOrMergeEnabled(ObserverContextMasterCoprocessorEnvironmentctx,
 booleannewValue,
-MasterSwitchTypeswitchType)
-Called after setting split / merge switch
-
+MasterSwitchTypeswitchType)
 
 
 boolean
-BaseMasterAndRegionObserver.preSetSplitOrMergeEnabled(ObserverContextMasterCoprocessorEnvironmentctx,
+MasterObserver.preSetSplitOrMergeEnabled(ObserverContextMasterCoprocessorEnvironmentctx,
   booleannewValue,
-  MasterSwitchTypeswitchType)
+  MasterSwitchTypeswitchType)
+Called prior to setting split / merge switch
+
 
 
 boolean
-BaseMasterObserver.preSetSplitOrMergeEnabled(ObserverContextMasterCoprocessorEnvironmentctx,
+BaseMasterAndRegionObserver.preSetSplitOrMergeEnabled(ObserverContextMasterCoprocessorEnvironmentctx,
   booleannewValue,
   MasterSwitchTypeswitchType)
 
 
 boolean
-MasterObserver.preSetSplitOrMergeEnabled(ObserverContextMasterCoprocessorEnvironmentctx,
+BaseMasterObserver.preSetSplitOrMergeEnabled(ObserverContextMasterCoprocessorEnvironmentctx,
   booleannewValue,
-  MasterSwitchTypeswitchType)
-Called prior to setting split / merge switch
-
+  MasterSwitchTypeswitchType)
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/client/class-use/MetricsConnection.CallStats.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/MetricsConnection.CallStats.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/MetricsConnection.CallStats.html
index bcd685e..a0437d1 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/MetricsConnection.CallStats.html
+++ 

hbase git commit: HBASE-15791 Improve javadoc around ScheduledChore

2016-05-09 Thread jmhsieh
Repository: hbase
Updated Branches:
  refs/heads/master 05378cbf6 -> 541d1da5f


HBASE-15791 Improve javadoc around ScheduledChore


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/541d1da5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/541d1da5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/541d1da5

Branch: refs/heads/master
Commit: 541d1da5fee9e52f3d6dc86f754dbdec9dab1919
Parents: 05378cb
Author: Jonathan M Hsieh 
Authored: Sat May 7 15:33:20 2016 -0700
Committer: Jonathan M Hsieh 
Committed: Mon May 9 08:41:28 2016 -0700

--
 .../org/apache/hadoop/hbase/ScheduledChore.java | 37 +---
 1 file changed, 25 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/541d1da5/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
index 2c54b9f..5c5bcd8 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ScheduledChore.java
@@ -55,9 +55,9 @@ public abstract class ScheduledChore implements Runnable {
   /**
* Scheduling parameters. Used by ChoreService when scheduling the chore to 
run periodically
*/
-  private final int period;
+  private final int period; // in TimeUnit units
   private final TimeUnit timeUnit;
-  private final long initialDelay;
+  private final long initialDelay; // in TimeUnit units
 
   /**
* Interface to the ChoreService that this ScheduledChore is scheduled with. 
null if the chore is
@@ -68,8 +68,8 @@ public abstract class ScheduledChore implements Runnable {
   /**
* Variables that encapsulate the meaningful state information
*/
-  private long timeOfLastRun = -1;
-  private long timeOfThisRun = -1;
+  private long timeOfLastRun = -1; // system time millis
+  private long timeOfThisRun = -1; // system time millis
   private boolean initialChoreComplete = false;
 
   /**
@@ -127,7 +127,7 @@ public abstract class ScheduledChore implements Runnable {
   /**
* @param name Name assigned to Chore. Useful for identification amongst 
chores of the same type
* @param stopper When {@link Stoppable#isStopped()} is true, this chore 
will cancel and cleanup
-   * @param period Period with which this Chore repeats execution when 
scheduled.
+   * @param period Period in millis with which this Chore repeats execution 
when scheduled.
*/
   public ScheduledChore(final String name, Stoppable stopper, final int 
period) {
 this(name, stopper, period, DEFAULT_INITIAL_DELAY);
@@ -136,7 +136,7 @@ public abstract class ScheduledChore implements Runnable {
   /**
* @param name Name assigned to Chore. Useful for identification amongst 
chores of the same type
* @param stopper When {@link Stoppable#isStopped()} is true, this chore 
will cancel and cleanup
-   * @param period Period with which this Chore repeats execution when 
scheduled.
+   * @param period Period in millis with which this Chore repeats execution 
when scheduled.
* @param initialDelay Delay before this Chore begins to execute once it has 
been scheduled. A
*  value of 0 means the chore will begin to execute immediately. 
Negative delays are
*  invalid and will be corrected to a value of 0.
@@ -149,10 +149,10 @@ public abstract class ScheduledChore implements Runnable {
   /**
* @param name Name assigned to Chore. Useful for identification amongst 
chores of the same type
* @param stopper When {@link Stoppable#isStopped()} is true, this chore 
will cancel and cleanup
-   * @param period Period with which this Chore repeats execution when 
scheduled.
-   * @param initialDelay Delay before this Chore begins to execute once it has 
been scheduled. A
-   *  value of 0 means the chore will begin to execute immediately. 
Negative delays are
-   *  invalid and will be corrected to a value of 0.
+   * @param period Period in Timeunit unit with which this Chore repeats 
execution when scheduled.
+   * @param initialDelay Delay in Timeunit unit before this Chore begins to 
execute once it has been
+   *  scheduled. A value of 0 means the chore will begin to execute 
immediately. Negative
+   *  delays are invalid and will be corrected to a value of 0.
* @param unit The unit that is used to measure period and initialDelay
*/
   public ScheduledChore(final String name, Stoppable stopper, final int period,
@@ -213,8 +213,8 @@ public abstract class ScheduledChore implements Runnable {
   }
 
   /**
-   * 
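Since the javadoc being clarified here is all about which units period and initialDelay are
interpreted in, a minimal usage sketch may help. It is not part of the commit: the chore name,
the printouts and the inline Stoppable stub are illustrative assumptions; the ScheduledChore
constructor and the ChoreService scheduling call follow the signatures referenced in the diff
above.

    import java.util.concurrent.TimeUnit;
    import org.apache.hadoop.hbase.ChoreService;
    import org.apache.hadoop.hbase.ScheduledChore;
    import org.apache.hadoop.hbase.Stoppable;

    public class ExampleChore extends ScheduledChore {
      ExampleChore(Stoppable stopper) {
        // period = 10 and initialDelay = 5, both interpreted in the supplied TimeUnit
        // (seconds here); the chore's internal timeOfLastRun/timeOfThisRun bookkeeping
        // stays in system milliseconds.
        super("example-chore", stopper, 10, 5, TimeUnit.SECONDS);
      }

      @Override
      protected void chore() {
        System.out.println("periodic work runs here");
      }

      public static void main(String[] args) {
        // Minimal Stoppable stub; a real caller would pass its server/stoppable instance.
        Stoppable stopper = new Stoppable() {
          private volatile boolean stopped = false;
          @Override public void stop(String why) { stopped = true; }
          @Override public boolean isStopped() { return stopped; }
        };
        new ChoreService("example").scheduleChore(new ExampleChore(stopper));
      }
    }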

[48/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
index 6c98f44..87c63f2 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.ReturnCode.html
@@ -107,107 +107,107 @@
[Regenerated "Uses of Filter.ReturnCode" table in the apidocs: the rows listing each
 filter's Filter.ReturnCode filterKeyValue(Cell) override (PrefixFilter, FirstKeyOnlyFilter,
 SingleColumnValueFilter, SkipFilter, FuzzyRowFilter, PageFilter, ColumnPaginationFilter,
 FirstKeyValueMatchingQualifiersFilter (deprecated), KeyOnlyFilter, FamilyFilter,
 ColumnCountGetFilter, RowFilter, ColumnPrefixFilter, DependentColumnFilter,
 ColumnRangeFilter, ValueFilter, RandomRowFilter, WhileMatchFilter, QualifierFilter,
 FilterList, TimestampsFilter, MultiRowRangeFilter, InclusiveStopFilter,
 MultipleColumnPrefixFilter, and the abstract Filter.filterKeyValue itself) are unchanged
 in content; the Javadoc regeneration only re-emitted them in a different order.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index e2db83a..02c305c 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -132,11 +132,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
[Regenerated "Uses of Filter" table: the Query.getFilter()/Scan.getFilter() rows, the
 Scan/Query/Get setFilter(Filter) rows, and the per-filter static
 createFilterFromArguments(ArrayList<byte[]>) rows swap positions only; no signatures
 change. The hunk is cut off mid-row by the archive.]
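As a hedged illustration of the filterKeyValue contract these class-use tables index (a sketch, not part of the commit; ValuePrefixFilter and its prefix logic are invented, and a production filter would also implement toByteArray/parseFrom for serialization):

  import java.io.IOException;
  import org.apache.hadoop.hbase.Cell;
  import org.apache.hadoop.hbase.CellUtil;
  import org.apache.hadoop.hbase.filter.FilterBase;
  import org.apache.hadoop.hbase.util.Bytes;

  public class ValuePrefixFilter extends FilterBase {
    private final byte[] prefix;

    public ValuePrefixFilter(byte[] prefix) {
      this.prefix = prefix;
    }

    @Override
    public ReturnCode filterKeyValue(Cell cell) throws IOException {
      // INCLUDE keeps the cell; NEXT_COL tells the scanner to move to the next column.
      if (Bytes.startsWith(CellUtil.cloneValue(cell), prefix)) {
        return ReturnCode.INCLUDE;
      }
      return ReturnCode.NEXT_COL;
    }
  }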
 

[28/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
index 8f0559d..3f8803b 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/ServerName.html
@@ -698,16 +698,16 @@
[Regenerated "Uses of ServerName" table in the devapidocs: the paired rows for
 MetaCache/ConnectionImplementation.cacheLocation, ClusterConnection/
 ConnectionImplementation clearCaches, getAdmin, getClient and isDeadServer, and the
 Admin/HBaseAdmin rows for closeRegion(ServerName, HRegionInfo),
 compactRegionServer(ServerName, boolean), coprocessorService(ServerName) and
 getOnlineRegions(ServerName) are only re-ordered by the Javadoc regeneration; no
 signatures change.]
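For orientation, a sketch of the Admin-side ServerName methods listed here (an assumption, not part of the commit; it simply picks the first live region server reported by the cluster status):

  import java.io.IOException;
  import java.util.List;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.HRegionInfo;
  import org.apache.hadoop.hbase.ServerName;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;

  public class OnlineRegionsExample {
    public static void main(String[] args) throws IOException {
      try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
           Admin admin = connection.getAdmin()) {
        ServerName server = admin.getClusterStatus().getServers().iterator().next();
        // Admin.getOnlineRegions(ServerName): every region hosted by that server.
        List<HRegionInfo> regions = admin.getOnlineRegions(server);
        System.out.println(server + " hosts " + regions.size() + " regions");
        // Admin.compactRegionServer(ServerName, boolean major): compact all of them.
        admin.compactRegionServer(server, false);
      }
    }
  }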
 

[11/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
index f3491da..3edec83 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
@@ -185,118 +185,118 @@
[Regenerated "Uses of ObserverContext" table: the MasterObserver, BaseMasterObserver and
 BaseMasterAndRegionObserver rows for postAbortProcedure, the deprecated postAddColumn and
 postAddColumnHandler hooks (deprecated per HBASE-13645 in favour of postAddColumnFamily
 and postCompletedAddColumnFamilyAction), and postAddColumnFamily are only re-ordered.
 The hunk is cut off mid-row by the archive.]
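A sketch of the non-deprecated hook these rows point to (an assumption, not part of the commit; AuditMasterObserver is an invented class name):

  import java.io.IOException;
  import org.apache.hadoop.hbase.HColumnDescriptor;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.coprocessor.BaseMasterObserver;
  import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
  import org.apache.hadoop.hbase.coprocessor.ObserverContext;

  public class AuditMasterObserver extends BaseMasterObserver {
    @Override
    public void postAddColumnFamily(ObserverContext<MasterCoprocessorEnvironment> ctx,
        TableName tableName, HColumnDescriptor columnFamily) throws IOException {
      // Called after the new column family has been created; postAddColumn is deprecated.
      System.out.println("Added family " + columnFamily.getNameAsString()
          + " to " + tableName.getNameAsString());
    }
  }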

[31/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
index 9282a66..a005330 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
@@ -123,68 +123,64 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
[Regenerated "Uses of HTableDescriptor" page: the package index drops
 org.apache.hadoop.hbase.master.handler and renumbers the remaining package anchors, and
 the Admin/HBaseAdmin rows for deleteTables, disableTables and enableTables (Pattern and
 String-regex overloads, all returning HTableDescriptor[]) are re-ordered only. The hunk
 is cut off mid-row by the archive.]

[33/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
index d16d0a3..54d5d52 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
@@ -863,11 +863,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
[Regenerated "Uses of HRegionInfo" table: the getHRegionInfo() rows (ScannerCallable,
 ScannerCallableWithReplicas, AbstractRegionServerCallable), the Admin/HBaseAdmin rows for
 getOnlineRegions(ServerName), getTableRegions(TableName) and closeRegion(ServerName,
 HRegionInfo), and the SplitLogManagerCoordination/ZKSplitLogManagerCoordination
 markRegionsRecovering rows are re-ordered only; the MasterObserver postAssign and
 postCompletedCreateTableAction rows follow. The hunk is cut off mid-row by the archive.]
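A sketch of the HRegionInfo accessors that dominate this table (an assumption, not part of the commit; "my_table" is a placeholder):

  import java.io.IOException;
  import java.util.List;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.HRegionInfo;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;

  public class ListTableRegions {
    public static void main(String[] args) throws IOException {
      try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
           Admin admin = conn.getAdmin()) {
        List<HRegionInfo> regions = admin.getTableRegions(TableName.valueOf("my_table"));
        for (HRegionInfo region : regions) {
          System.out.println(region.getRegionNameAsString()
              + " encoded=" + region.getEncodedName());
        }
      }
    }
  }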

[05/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
index 1d08f0e..318b8ae 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/BlockCacheKey.html
@@ -160,10 +160,8 @@
[Regenerated "Uses of BlockCacheKey" table: the BlockCache, LruBlockCache,
 CombinedBlockCache and InclusiveCombinedBlockCache rows for cacheBlock(BlockCacheKey,
 Cacheable[, boolean inMemory, boolean cacheDataInL1]), getBlock(BlockCacheKey, boolean,
 boolean, boolean), evictBlock(BlockCacheKey) and returnBlock(BlockCacheKey, Cacheable),
 plus the BucketCache.BucketEntryGroup/CachedEntryQueue add rows, are re-ordered only; no
 signatures change. The hunk is cut off mid-row by the archive.]
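A sketch of the cache API whose implementations are listed above (an assumption, not part of the commit; the 32 MB/64 KB sizes and the "somehfile" key are arbitrary, and these are private server-side classes):

  import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
  import org.apache.hadoop.hbase.io.hfile.Cacheable;
  import org.apache.hadoop.hbase.io.hfile.LruBlockCache;

  public class BlockCacheProbe {
    public static void main(String[] args) {
      LruBlockCache cache = new LruBlockCache(32 * 1024 * 1024L, 64 * 1024L);
      BlockCacheKey key = new BlockCacheKey("somehfile", 0L);
      // getBlock(key, caching, repeat, updateCacheMetrics) returns null on a miss.
      Cacheable block = cache.getBlock(key, true, false, true);
      System.out.println("cache hit? " + (block != null));
      // evictBlock(key) reports whether anything was actually evicted.
      System.out.println("evicted? " + cache.evictBlock(key));
      cache.shutdown();
    }
  }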
 

[46/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
index ccc396d..b1f947b 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/ByteRange.html
@@ -249,11 +249,11 @@
[Regenerated "Uses of ByteRange" table: the SimpleByteRange and SimpleMutableByteRange
 rows for deepCopy(), put(int, byte), put(int, byte[]), put(int, byte[], int, int),
 putInt, putLong, putShort, shallowCopy(), shallowCopySubRange(int, int) and unset()
 swap positions only.]
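A sketch contrasting the two implementations that alternate in this table (an assumption, not part of the commit): SimpleMutableByteRange accepts writes, SimpleByteRange is read-only.

  import org.apache.hadoop.hbase.util.ByteRange;
  import org.apache.hadoop.hbase.util.Bytes;
  import org.apache.hadoop.hbase.util.SimpleByteRange;
  import org.apache.hadoop.hbase.util.SimpleMutableByteRange;

  public class ByteRangeDemo {
    public static void main(String[] args) {
      ByteRange mutable = new SimpleMutableByteRange(new byte[8]);
      mutable.putInt(0, 42).putInt(4, 7);            // writes are allowed here
      System.out.println(mutable.getInt(0) + " " + mutable.getInt(4));

      ByteRange readOnly = new SimpleByteRange(Bytes.toBytes("immutable"));
      // readOnly.put(0, (byte) 'X') would throw ReadOnlyByteRangeException.
      System.out.println(Bytes.toString(readOnly.deepCopyToNewArray()));
    }
  }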
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
index e3c1f44..5532fb3 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/Order.html
@@ -104,15 +104,15 @@
[Regenerated "Uses of Order" table: the protected Order fields of RawString, RawBytes and
 OrderedBytesBase, and the getOrder() rows of DataType, Struct, Union3, Union4,
 FixedLengthWrapper, PBType and the Raw* primitive types, are re-ordered only.]
 
 



[26/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index 8b60c73..9bc68e9 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -313,11 +313,11 @@ service.
[Regenerated "Uses of TableName" page: the private/protected tableName fields of the
 client classes (HRegionInfo, MetaTableAccessor.TableVisitorBase, HRegionLocator,
 BufferedMutatorImpl, TableState, BufferedMutatorParams, RegionAdminServiceCallable,
 ClientScanner, HTable, ScannerCallableWithReplicas, HBaseAdmin.TableFuture,
 AbstractRegionServerCallable, RpcRetryingCallerWithReadReplicas), the getName() and
 getTableName() rows, and the Admin/HBaseAdmin listTableNames() overloads (no-arg,
 Pattern, String regex, with or without includeSysTables) are re-ordered only. The hunk
 is cut off mid-row by the archive.]
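A sketch of the Pattern overload shown above (an assumption, not part of the commit; the "demo_ns:.*" namespace pattern is a placeholder):

  import java.io.IOException;
  import java.util.regex.Pattern;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;

  public class ListTables {
    public static void main(String[] args) throws IOException {
      try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
           Admin admin = conn.getAdmin()) {
        // listTableNames(Pattern, includeSysTables): userspace tables only when false.
        TableName[] names = admin.listTableNames(Pattern.compile("demo_ns:.*"), false);
        for (TableName name : names) {
          System.out.println(name.getNameAsString());
        }
      }
    }
  }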

[44/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/apidocs/overview-tree.html
--
diff --git a/apidocs/overview-tree.html b/apidocs/overview-tree.html
index 9429c41..1224487 100644
--- a/apidocs/overview-tree.html
+++ b/apidocs/overview-tree.html
@@ -203,7 +203,6 @@
[Regenerated apidocs class hierarchy: org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel
 moves out of the class tree and is now listed as an interface extending both
 com.google.protobuf.BlockingRpcChannel and com.google.protobuf.RpcChannel; the new class
 org.apache.hadoop.hbase.ipc.SyncCoprocessorRpcChannel (implements CoprocessorRpcChannel)
 and the new interface org.apache.hadoop.hbase.client.Future<V> (listed under
 java.util.concurrent.Future and io.netty.util.concurrent.Future) are added; the enum
 hierarchy entries are re-sorted.]
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/apidocs/src-html/org/apache/hadoop/hbase/client/Future.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Future.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Future.html
new file mode 100644
index 000..29253f4
--- /dev/null
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Future.html
@@ -0,0 +1,103 @@
+[Standard Javadoc source-page HTML boilerplate for the new
+ org.apache.hadoop.hbase.client.Future page, followed by the start of the Apache license
+ header; the message is truncated here by the archive.]

[06/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/DataBlockEncoding.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/DataBlockEncoding.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/DataBlockEncoding.html
index 8acb1ef..ecf1293 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/DataBlockEncoding.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/DataBlockEncoding.html
@@ -199,11 +199,11 @@ Input/OutputFormats, a table indexing MapReduce job, and utility methods.
[Regenerated "Uses of DataBlockEncoding" table: the HFileBlockEncodingContext and
 HFileBlockDefaultEncodingContext getDataBlockEncoding() rows, the DataBlockEncoder and
 BufferedDataBlockEncoder newDataBlockEncodingContext rows, the HFileDataBlockEncoder(Impl),
 NoOpDataBlockEncoder, HFile.Reader and HFileReaderImpl getDataBlockEncoding() and
 getEffectiveEncodingInCache(boolean) rows, and the readBlock(...) rows are re-ordered
 only; no signatures change.]
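A sketch of the user-facing side of these internals (an assumption, not part of the commit; "cf" and "my_table" are placeholders): the encoding chosen on the column family is what the encoder/reader classes above apply at read and write time.

  import org.apache.hadoop.hbase.HColumnDescriptor;
  import org.apache.hadoop.hbase.HTableDescriptor;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

  public class EncodingExample {
    public static void main(String[] args) {
      HColumnDescriptor family = new HColumnDescriptor("cf");
      family.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);  // encode blocks on disk
      HTableDescriptor table = new HTableDescriptor(TableName.valueOf("my_table"));
      table.addFamily(family);
      System.out.println(table);
    }
  }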

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/EncodingState.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/EncodingState.html 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/EncodingState.html
index 3984623..3d1d2f6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/EncodingState.html
+++ 

[51/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/33c287c2
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/33c287c2
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/33c287c2

Branch: refs/heads/asf-site
Commit: 33c287c2e723f19e1e34d30d3cb37bb16fd232bc
Parents: 28d625a
Author: jenkins 
Authored: Sun May 8 15:02:28 2016 +
Committer: Misty Stanley-Jones 
Committed: Mon May 9 09:49:42 2016 -0700

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/allclasses-frame.html   | 4 +-
 apidocs/allclasses-noframe.html | 4 +-
 apidocs/index-all.html  |28 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   | 4 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   244 +-
 .../hadoop/hbase/class-use/ServerName.html  | 2 +-
 .../hadoop/hbase/class-use/TableName.html   | 8 +-
 .../org/apache/hadoop/hbase/client/Admin.html   | 8 +-
 .../apache/hadoop/hbase/client/Consistency.html | 4 +-
 .../apache/hadoop/hbase/client/Durability.html  | 4 +-
 .../org/apache/hadoop/hbase/client/Future.html  |   195 +
 apidocs/org/apache/hadoop/hbase/client/Get.html | 4 +-
 .../org/apache/hadoop/hbase/client/Table.html   | 4 +-
 .../hbase/client/class-use/Consistency.html | 8 +-
 .../hbase/client/class-use/Durability.html  |16 +-
 .../hadoop/hbase/client/class-use/Future.html   |   115 +
 .../hbase/client/class-use/IsolationLevel.html  | 8 +-
 .../hadoop/hbase/client/class-use/Result.html   |36 +-
 .../hadoop/hbase/client/class-use/Scan.html |15 +-
 .../hadoop/hbase/client/class-use/Table.html| 4 +-
 .../hadoop/hbase/client/package-frame.html  | 1 +
 .../hadoop/hbase/client/package-summary.html|16 +-
 .../hadoop/hbase/client/package-tree.html   |15 +-
 .../filter/class-use/ByteArrayComparable.html   | 8 +-
 .../class-use/CompareFilter.CompareOp.html  | 8 +-
 .../filter/class-use/Filter.ReturnCode.html |62 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |58 +-
 .../hadoop/hbase/filter/package-tree.html   | 4 +-
 .../io/class-use/ImmutableBytesWritable.html|48 +-
 .../hadoop/hbase/io/class-use/TimeRange.html| 8 +-
 .../hbase/io/crypto/class-use/Cipher.html   | 8 +-
 .../hbase/io/encoding/DataBlockEncoding.html| 4 +-
 .../hadoop/hbase/ipc/CallTimeoutException.html  | 4 +-
 .../hadoop/hbase/ipc/CoprocessorRpcChannel.html |   121 +-
 .../hadoop/hbase/ipc/FailedServerException.html | 4 +-
 .../hbase/ipc/StoppedRpcClientException.html| 4 +-
 .../hbase/ipc/SyncCoprocessorRpcChannel.html|   276 +
 .../ipc/UnsupportedCellCodecException.html  | 4 +-
 .../ipc/class-use/CoprocessorRpcChannel.html|55 +-
 .../class-use/SyncCoprocessorRpcChannel.html|   115 +
 .../apache/hadoop/hbase/ipc/package-frame.html  | 6 +-
 .../hadoop/hbase/ipc/package-summary.html   |20 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   |15 +-
 .../apache/hadoop/hbase/ipc/package-use.html|10 +-
 .../hadoop/hbase/mapreduce/CellCounter.html | 6 +-
 .../hbase/mapreduce/TableInputFormat.html   |45 +-
 .../mapreduce/class-use/TableRecordReader.html  | 4 +-
 .../apache/hadoop/hbase/quotas/QuotaType.html   | 4 +-
 .../hbase/quotas/ThrottlingException.Type.html  | 4 +-
 .../hadoop/hbase/quotas/package-tree.html   | 4 +-
 .../hadoop/hbase/rest/client/RemoteHTable.html  | 4 +-
 .../hadoop/hbase/util/class-use/ByteRange.html  |40 +-
 .../hadoop/hbase/util/class-use/Order.html  |40 +-
 .../util/class-use/PositionedByteRange.html |   356 +-
 apidocs/overview-tree.html  |42 +-
 .../org/apache/hadoop/hbase/client/Future.html  |   103 +
 .../hadoop/hbase/ipc/CallTimeoutException.html  | 8 +-
 .../hadoop/hbase/ipc/CoprocessorRpcChannel.html |86 +-
 .../hbase/ipc/SyncCoprocessorRpcChannel.html|   151 +
 .../hadoop/hbase/mapreduce/CellCounter.html |   443 +-
 .../hbase/mapreduce/TableInputFormat.html   |   304 +-
 book.html   | 2 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 27914 -
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html  

[10/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
index e39c4cd..ae29bc8 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
@@ -152,11 +152,11 @@
[Regenerated "Uses of RegionCoprocessorEnvironment" table: the private env fields of
 AggregateImplementation, MultiRowMutationEndpoint, RowCountEndpoint and
 BulkDeleteEndpoint, and the VisibilityLabelService/DefaultVisibilityLabelServiceImpl
 init(RegionCoprocessorEnvironment) rows, are re-ordered only.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
index a6609dd..f76650a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
@@ -110,63 +110,69 @@
[Regenerated "Uses of RegionServerCoprocessorEnvironment" table: the RegionServerObserver
 and BaseRegionServerObserver rows for postCreateReplicationEndPoint, postMerge,
 postMergeCommit and postReplicateLogEntries are re-ordered only. The hunk is cut off
 mid-row by the archive.]
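A sketch of the env-capturing pattern the endpoint classes above rely on (an assumption, not part of the commit; EnvAwareObserver is an invented name):

  import java.io.IOException;
  import org.apache.hadoop.hbase.CoprocessorEnvironment;
  import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
  import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;

  public class EnvAwareObserver extends BaseRegionObserver {
    private RegionCoprocessorEnvironment env;

    @Override
    public void start(CoprocessorEnvironment e) throws IOException {
      if (e instanceof RegionCoprocessorEnvironment) {
        this.env = (RegionCoprocessorEnvironment) e;  // region-level context
      } else {
        throw new IOException("Must be loaded on a region");
      }
    }

    @Override
    public void stop(CoprocessorEnvironment e) throws IOException {
      this.env = null;
    }
  }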

[04/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html
index 8c3c8ed..914cdeb 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/CachedBlock.html
@@ -142,7 +142,7 @@
[Regenerated "Uses of CachedBlock" table: the Iterator<CachedBlock> iterator() rows of
 BlockCache and CombinedBlockCache swap positions only.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/FixedFileTrailer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/FixedFileTrailer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/FixedFileTrailer.html
index d93549c..d11a033 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/FixedFileTrailer.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/FixedFileTrailer.html
@@ -112,11 +112,11 @@
[Regenerated "Uses of FixedFileTrailer" table: the getTrailer() rows of HFile.Reader and
 HFileReaderImpl swap positions only.]

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.FileInfo.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.FileInfo.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.FileInfo.html
index ab906fc..6c88fef 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.FileInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.FileInfo.html
@@ -98,15 +98,15 @@
[Regenerated "Uses of HFile.FileInfo" table: the HFileWriterImpl.fileInfo and
 HFileReaderImpl.fileInfo field rows swap positions only.]
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Reader.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Reader.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Reader.html
index 72147c0..41e5b66 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Reader.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Reader.html
@@ -121,15 +121,15 @@
 
 
 
-protected HFile.Reader
-HFileReaderImpl.HFileScannerImpl.reader
-
-
 private HFile.Reader
 CompoundBloomFilter.reader
 Used to load chunks on demand
 
 
+
+protected HFile.Reader
+HFileReaderImpl.HFileScannerImpl.reader
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
index 6ae2569..946514f 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFile.Writer.html
@@ -135,11 +135,11 @@
 
 
 void
-HFileDataBlockEncoderImpl.saveMetadata(HFile.Writerwriter)
+NoOpDataBlockEncoder.saveMetadata(HFile.Writerwriter)
 
 
 void
-NoOpDataBlockEncoder.saveMetadata(HFile.Writerwriter)
+HFileDataBlockEncoderImpl.saveMetadata(HFile.Writerwriter)
 
 
 void

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/33c287c2/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.FSReader.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.FSReader.html
 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/HFileBlock.FSReader.html
index 3b45fc0..9e71685 100644
--- 

[22/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
(Regenerated Javadoc diff for devapidocs/org/apache/hadoop/hbase/client/MultiServerCallable.html; the HTML markup was stripped in this archive. The page now shows AbstractRegionServerCallable<T> as a new superclass above RegionServerCallable<T> in the class hierarchy, adds RetryingCallableBase to the implemented interfaces, attributes the inherited fields connection, location, MIN_WAIT_DEAD_SERVER, row and tableName to AbstractRegionServerCallable, splits the inherited-method lists between RegionServerCallable (getStub, setClientByServiceName, setStub) and AbstractRegionServerCallable (getConnection, getExceptionMessageAdditionalDetail, getRow, getTableName, setLocation, sleep, throwable), and records getLocation(), getHRegionInfo() and prepare(boolean) as overriding AbstractRegionServerCallable<MultiResponse> while prepare(boolean) is specified by RetryingCallableBase. Truncated by the archive.)

[32/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
(Regenerated Javadoc diff for devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html; the HTML markup was stripped in this archive. The location field and getLocation() are now attributed to AbstractRegionServerCallable instead of RegionServerCallable, and the remaining changes reorder rows in the generated use tables for RegionLocator/HRegionLocator getRegionLocation(...) and getAllRegionLocations(), and for ClusterConnection/ConnectionImplementation/HConnection getRegionLocation(...), locateRegion(...), relocateRegion(...) and locateRegions(...). Truncated by the archive.)

[29/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
(Regenerated Javadoc diffs for devapidocs/org/apache/hadoop/hbase/class-use/RegionLocations.html and class-use/Server.html; the HTML markup was stripped in this archive. The changes reorder rows in the generated use tables, e.g. MetaCache vs. ConnectionImplementation getCachedLocation(...), Registry vs. ZooKeeperRegistry getMetaRegionLocation(), ClusterConnection vs. ConnectionImplementation locateRegion(...)/relocateRegion(...)/cacheLocation(...), and the Server fields of ZkCoordinatedStateManager, ZKSplitLogManagerCoordination, RegionStateStore, BulkAssigner, CatalogJanitor and SplitLogManager. Truncated by the archive.)

[47/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
(Regenerated Javadoc diff for apidocs/org/apache/hadoop/hbase/ipc/CoprocessorRpcChannel.html; the HTML markup was stripped in this archive. The page changes from "Class CoprocessorRpcChannel" to "Interface CoprocessorRpcChannel": the type is now declared as

  @InterfaceAudience.Public
  @InterfaceStability.Evolving
  public interface CoprocessorRpcChannel
      extends com.google.protobuf.RpcChannel, com.google.protobuf.BlockingRpcChannel

with SyncCoprocessorRpcChannel listed as the known implementing class. The former constructor and the protected abstract callExecService(...) summary are removed, and the description now reads "Base interface which provides clients with an RPC connection to call coprocessor endpoint Services", still directing clients to use it only through Table.coprocessorService(byte[]) rather than directly. Truncated by the archive.)
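A minimal caller-side sketch of the usage path the page describes (not from this commit): the channel comes from Table.coprocessorService(byte[]) and is handed to a protobuf-generated stub. The table name, row key and the ExampleProtos.ExampleService stub are placeholders, and the snippet assumes an hbase-client classpath plus a deployed coprocessor endpoint.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;
  import org.apache.hadoop.hbase.client.Table;
  import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
  import org.apache.hadoop.hbase.util.Bytes;

  public class CoprocessorChannelSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      try (Connection connection = ConnectionFactory.createConnection(conf);
           Table table = connection.getTable(TableName.valueOf("t1"))) {
        // CoprocessorRpcChannel is the interface documented above; the concrete
        // object returned by the client (SyncCoprocessorRpcChannel, per the
        // generated docs) is an implementation detail.
        CoprocessorRpcChannel channel = table.coprocessorService(Bytes.toBytes("row-1"));
        // A protobuf-generated endpoint stub would then be built from the channel,
        // e.g. (hypothetical service):
        // ExampleProtos.ExampleService.BlockingInterface stub =
        //     ExampleProtos.ExampleService.newBlockingStub(channel);
      }
    }
  }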

[02/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
(Regenerated Javadoc diff for devapidocs/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.html; the HTML markup was stripped in this archive. The page changes from "Class AsyncRpcChannel", a concrete @InterfaceAudience.Private "Netty RPC channel" with a Bootstrap-based constructor, the nested AsyncRpcChannel.CallWriteListener class and fields such as address, authMethod, channel, cleanupTimer, client, pendingCalls, serverPrincipal, ticket, token and useSasl, to "Interface AsyncRpcChannel", described as "Interface for Async Rpc Channels" with AsyncRpcChannelImpl as the known implementing class; callMethod(...) becomes a generic method returning a Future. Truncated by the archive.)

[35/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
(Regenerated Javadoc diffs for devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html and class-use/HBaseIOException.html; the HTML markup was stripped in this archive. The changes reorder rows for the start(CoprocessorEnvironment)/stop(CoprocessorEnvironment) methods of the base observers and endpoints (BaseMasterObserver, BaseMasterAndRegionObserver, BaseRegionObserver, BaseWALObserver, BaseRegionServerObserver, AggregateImplementation, MultiRowMutationEndpoint, BulkDeleteEndpoint, RowCountEndpoint, ZooKeeperScanPolicyObserver, AccessController, SecureBulkLoadEndpoint) and for RegionNormalizer vs. SimpleRegionNormalizer computePlanForTable(TableName). Truncated by the archive.)

[37/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
(Regenerated Javadoc diff for devapidocs/org/apache/hadoop/hbase/class-use/Cell.html; the HTML markup was stripped in this archive. The changes reorder rows in the generated use tables for CellComparator, CellComparator.RowComparator, CellComparator.MetaCellComparator, KeyValue.KVComparator and CellUtil (compare, compareRows, compareTimestamps, matchingRowColumn, matchingRows) and for the mutation methods Put/Append/Increment/Delete add(Cell) and setFamilyCellMap(NavigableMap<byte[], List<Cell>>). Truncated by the archive.)

[41/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
(Regenerated site-page diffs; the HTML markup was stripped in this archive. coc.html, cygwin.html, dependencies.html, dependency-convergence.html, dependency-info.html and dependency-management.html only bump the "Last Published" footer date from 2016-05-06 to 2016-05-08, and devapidocs/allclasses-frame.html adds AbstractRegionServerCallable to the class index and touches the AsyncRpcChannel entry. Truncated by the archive.)

[12/51] [partial] hbase-site git commit: Published site at 9ee0cbb995c1d7de905f4138a199f115762725e8.

2016-05-09 Thread misty
(Regenerated Javadoc diff for devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html; the HTML markup was stripped in this archive. The changes reorder the MasterObserver/BaseMasterObserver/BaseMasterAndRegionObserver rows for postAbortProcedure, postAddColumn, postAddColumnFamily and postAddColumnHandler, carrying the HBASE-13645 deprecation notes that point callers at postAddColumnFamily(ObserverContext, TableName, HColumnDescriptor) and postCompletedAddColumnFamilyAction(ObserverContext, TableName, HColumnDescriptor). Truncated by the archive.)

hbase-site git commit: Trivial commit to make gitsubpub work

2016-05-09 Thread misty
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 958717f4d -> 28d625a2a


Trivial commit to make gitsubpub work


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/28d625a2
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/28d625a2
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/28d625a2

Branch: refs/heads/asf-site
Commit: 28d625a2ac8a4808b81eaa64071cd86ced1e96f9
Parents: 958717f
Author: Misty Stanley-Jones 
Authored: Mon May 9 09:41:35 2016 -0700
Committer: Misty Stanley-Jones 
Committed: Mon May 9 09:41:35 2016 -0700

--
 index.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/28d625a2/index.html
--
diff --git a/index.html b/index.html
index 534c112..4105c5d 100644
--- a/index.html
+++ b/index.html
@@ -14,7 +14,7 @@
 (a single markup-only line near the top of index.html was replaced; the tag itself was stripped by the list archive)



hbase git commit: HBASE-15609 Addendum fix compilation error

2016-05-09 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 97ad33c69 -> 05378cbf6


HBASE-15609 Addendum fix compilation error


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/05378cbf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/05378cbf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/05378cbf

Branch: refs/heads/master
Commit: 05378cbf69957b76e8559185ea20235242a8b2e6
Parents: 97ad33c
Author: zhangduo 
Authored: Mon May 9 20:45:04 2016 +0800
Committer: zhangduo 
Committed: Mon May 9 20:45:04 2016 +0800

--
 .../org/apache/hadoop/hbase/master/ClusterStatusPublisher.java | 3 ++-
 .../test/java/org/apache/hadoop/hbase/client/TestResult.java   | 6 --
 2 files changed, 2 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/05378cbf/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
index e90aae6..194e023 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
@@ -59,6 +59,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.ScheduledChore;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos;
 import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -332,7 +333,7 @@ public class ClusterStatusPublisher extends ScheduledChore {
   @Override
   protected void encode(ChannelHandlerContext channelHandlerContext,
 ClusterStatus clusterStatus, List<Object> objects) {
-ClusterStatusProtos.ClusterStatus csp = clusterStatus.convert();
+ClusterStatusProtos.ClusterStatus csp = ProtobufUtil.convert(clusterStatus);
 objects.add(new DatagramPacket(Unpooled.wrappedBuffer(csp.toByteArray()), isa));
   }
 }
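
A minimal usage sketch of the call path this hunk switches to (not part of the commit): the ClusterStatus is converted to its protobuf form through ProtobufUtil instead of the now-deprecated ClusterStatus.convert(), then serialized the same way the publisher wraps it into a DatagramPacket. Only classes already referenced by the patched file are used.

  import org.apache.hadoop.hbase.ClusterStatus;
  import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
  import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos;

  public final class ClusterStatusEncoding {
    private ClusterStatusEncoding() {
    }

    // Same conversion the patched encode() performs before the bytes are wrapped
    // in a DatagramPacket for multicast publication.
    public static byte[] toBytes(ClusterStatus clusterStatus) {
      ClusterStatusProtos.ClusterStatus csp = ProtobufUtil.convert(clusterStatus);
      return csp.toByteArray();
    }
  }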

http://git-wip-us.apache.org/repos/asf/hbase/blob/05378cbf/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java
index 6baf9e0..0e93ee0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestResult.java
@@ -244,12 +244,6 @@ public class TestResult extends TestCase {
   LOG.debug("As expected: " + ex.getMessage());
 }
 try {
-  emptyResult.addResults(new RegionLoadStats(0, 0, 0));
-  fail("UnsupportedOperationException should have been thrown!");
-} catch (UnsupportedOperationException ex) {
-  LOG.debug("As expected: " + ex.getMessage());
-}
-try {
   emptyResult.setExists(true);
   fail("UnsupportedOperationException should have been thrown!");
 } catch (UnsupportedOperationException ex) {



hbase git commit: HBASE-15609 Remove PB references from Result, DoubleColumnInterpreter and any such public facing class for 2.0 (Ram)

2016-05-09 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 b4573c803 -> fdf117ec6


HBASE-15609 Remove PB references from Result, DoubleColumnInterpreter and
any such public facing class for 2.0 (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fdf117ec
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fdf117ec
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fdf117ec

Branch: refs/heads/branch-1.3
Commit: fdf117ec69440e75d703d1091f7a3718747e4f64
Parents: b4573c8
Author: Ramkrishna 
Authored: Mon May 9 14:58:31 2016 +0530
Committer: Ramkrishna 
Committed: Mon May 9 14:58:31 2016 +0530

--
 .../src/main/java/org/apache/hadoop/hbase/ClusterStatus.java| 2 ++
 .../src/main/java/org/apache/hadoop/hbase/client/Result.java| 2 ++
 .../hbase/client/coprocessor/BigDecimalColumnInterpreter.java   | 5 -
 .../hbase/client/coprocessor/DoubleColumnInterpreter.java   | 3 ++-
 .../hadoop/hbase/client/coprocessor/LongColumnInterpreter.java  | 5 -
 .../src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java| 3 +++
 6 files changed, 17 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fdf117ec/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
index c8caa96..f1e2d56 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
@@ -366,6 +366,7 @@ public class ClusterStatus extends VersionedWritable {
 *
 * @return the protobuf ClusterStatus
 */
+  @Deprecated
   public ClusterStatusProtos.ClusterStatus convert() {
 ClusterStatusProtos.ClusterStatus.Builder builder =
 ClusterStatusProtos.ClusterStatus.newBuilder();
@@ -432,6 +433,7 @@ public class ClusterStatus extends VersionedWritable {
* @param proto the protobuf ClusterStatus
* @return the converted ClusterStatus
*/
+  @Deprecated
   public static ClusterStatus convert(ClusterStatusProtos.ClusterStatus proto) 
{
 
 Map servers = null;
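
For callers this keeps the old entry points compiling on branch-1.3 while flagging them for removal; a minimal sketch, assuming an already-obtained ClusterStatus named status:

  // Both calls still work on branch-1.3 but now carry @Deprecated.
  ClusterStatusProtos.ClusterStatus proto = status.convert();
  ClusterStatus roundTripped = ClusterStatus.convert(proto);
  // On master the replacement conversion lives in ProtobufUtil
  // (see the HBASE-15609 addendum above: ProtobufUtil.convert(status)).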

http://git-wip-us.apache.org/repos/asf/hbase/blob/fdf117ec/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
index e764c4e..3e645b0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
@@ -1001,6 +1001,7 @@ public class Result implements CellScannable, CellScanner 
{
* @param loadStats statistics about the current region from which this was 
returned
*/
   @InterfaceAudience.Private
+  @Deprecated
   public void setStatistics(ClientProtos.RegionLoadStats loadStats) {
 this.stats = loadStats;
   }
@@ -1009,6 +1010,7 @@ public class Result implements CellScannable, CellScanner 
{
* @return the associated statistics about the region from which this was 
returned. Can be
* null if stats are disabled.
*/
+  @Deprecated
   public ClientProtos.RegionLoadStats getStats() {
 return stats;
   }
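
A caller-side sketch of what the Result deprecation means (illustrative only, assuming a Result named result returned from a get): the PB-typed statistics accessors keep working on branch-1.3 but are expected to give way to the client-side RegionLoadStats class added on master by 97ad33c6.

  @SuppressWarnings("deprecation")
  ClientProtos.RegionLoadStats pbStats = result.getStats();  // now @Deprecated; can be null if stats are disabled
  if (pbStats != null) {
    // Inspect or forward the protobuf stats here; new code on master is expected
    // to work with org.apache.hadoop.hbase.client.RegionLoadStats instead.
  }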

http://git-wip-us.apache.org/repos/asf/hbase/blob/fdf117ec/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
index d693f0c..d2e609e 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
@@ -24,8 +24,10 @@ import java.math.RoundingMode;
 
 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg;
 import 

hbase git commit: HBASE-15609 Remove PB references from Result, DoubleColumnInterpreter and any such public facing class for 2.0 (Ram)

2016-05-09 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/branch-1 2c280c718 -> 94c4d568b


HBASE-15609 Remove PB references from Result, DoubleColumnInterpreter and
any such public facing class for 2.0 (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/94c4d568
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/94c4d568
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/94c4d568

Branch: refs/heads/branch-1
Commit: 94c4d568b54e5b9faf3fa6b9db4107c8ecb08291
Parents: 2c280c7
Author: Ramkrishna 
Authored: Mon May 9 14:57:39 2016 +0530
Committer: Ramkrishna 
Committed: Mon May 9 14:57:39 2016 +0530

--
 .../src/main/java/org/apache/hadoop/hbase/ClusterStatus.java| 2 ++
 .../src/main/java/org/apache/hadoop/hbase/client/Result.java| 2 ++
 .../hbase/client/coprocessor/BigDecimalColumnInterpreter.java   | 5 -
 .../hbase/client/coprocessor/DoubleColumnInterpreter.java   | 3 ++-
 .../hadoop/hbase/client/coprocessor/LongColumnInterpreter.java  | 5 -
 .../src/main/java/org/apache/hadoop/hbase/ProcedureInfo.java| 3 +++
 6 files changed, 17 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/94c4d568/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
index c8caa96..f1e2d56 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
@@ -366,6 +366,7 @@ public class ClusterStatus extends VersionedWritable {
 *
 * @return the protobuf ClusterStatus
 */
+  @Deprecated
   public ClusterStatusProtos.ClusterStatus convert() {
 ClusterStatusProtos.ClusterStatus.Builder builder =
 ClusterStatusProtos.ClusterStatus.newBuilder();
@@ -432,6 +433,7 @@ public class ClusterStatus extends VersionedWritable {
* @param proto the protobuf ClusterStatus
* @return the converted ClusterStatus
*/
+  @Deprecated
   public static ClusterStatus convert(ClusterStatusProtos.ClusterStatus proto) 
{
 
 Map servers = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/94c4d568/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
index e764c4e..3e645b0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
@@ -1001,6 +1001,7 @@ public class Result implements CellScannable, CellScanner 
{
* @param loadStats statistics about the current region from which this was 
returned
*/
   @InterfaceAudience.Private
+  @Deprecated
   public void setStatistics(ClientProtos.RegionLoadStats loadStats) {
 this.stats = loadStats;
   }
@@ -1009,6 +1010,7 @@ public class Result implements CellScannable, CellScanner 
{
* @return the associated statistics about the region from which this was 
returned. Can be
* null if stats are disabled.
*/
+  @Deprecated
   public ClientProtos.RegionLoadStats getStats() {
 return stats;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/94c4d568/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
index d693f0c..d2e609e 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
@@ -24,8 +24,10 @@ import java.math.RoundingMode;
 
 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg;
 import 

hbase git commit: HBASE-15609 Remove PB references from Result, DoubleColumnInterpreter and any such public facing class for 2.0 (Ram)

2016-05-09 Thread ramkrishna
Repository: hbase
Updated Branches:
  refs/heads/master 5e091 -> 97ad33c69


HBASE-15609 Remove PB references from Result, DoubleColumnInterpreter and
any such public facing class for 2.0 (Ram)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/97ad33c6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/97ad33c6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/97ad33c6

Branch: refs/heads/master
Commit: 97ad33c6918dc39811fc86d82b38e43e60361bf5
Parents: 5e0
Author: Ramkrishna 
Authored: Mon May 9 14:56:00 2016 +0530
Committer: Ramkrishna 
Committed: Mon May 9 14:56:00 2016 +0530

--
 .../java/org/apache/hadoop/hbase/ClusterId.java |   4 +-
 .../org/apache/hadoop/hbase/ClusterStatus.java  | 113 
 .../hadoop/hbase/client/AsyncProcess.java   |   6 +-
 .../hbase/client/ClusterStatusListener.java |   3 +-
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |   5 +-
 .../hadoop/hbase/client/MetricsConnection.java  |   7 +-
 .../hadoop/hbase/client/RegionLoadStats.java|  50 
 .../org/apache/hadoop/hbase/client/Result.java  |  20 +--
 .../hadoop/hbase/client/ResultStatsUtil.java|   5 +-
 .../hbase/client/ServerStatisticTracker.java|   3 +-
 .../hadoop/hbase/client/StatisticTrackable.java |   4 +-
 .../hbase/client/backoff/ServerStatistics.java  |   6 +-
 .../BigDecimalColumnInterpreter.java|   5 +-
 .../coprocessor/DoubleColumnInterpreter.java|   3 +-
 .../coprocessor/LongColumnInterpreter.java  |   5 +-
 .../hbase/coprocessor/ColumnInterpreter.java|   5 +-
 .../hadoop/hbase/protobuf/ProtobufUtil.java | 128 +++
 .../client/TestClientExponentialBackoff.java|   5 +-
 .../org/apache/hadoop/hbase/ProcedureInfo.java  |  76 +--
 .../org/apache/hadoop/hbase/ProcedureState.java |  30 +
 .../org/apache/hadoop/hbase/ProcedureUtil.java  | 103 +++
 .../hadoop/hbase/procedure2/Procedure.java  |  21 ++-
 .../store/wal/ProcedureWALFormatReader.java |   3 +-
 .../procedure2/ProcedureTestingUtility.java |  12 +-
 .../hadoop/hbase/master/MasterRpcServices.java  |   7 +-
 .../master/procedure/ProcedureSyncWait.java |   2 +-
 .../apache/hadoop/hbase/client/TestResult.java  |   3 +-
 .../master/procedure/TestProcedureAdmin.java|   2 +-
 28 files changed, 385 insertions(+), 251 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/97ad33c6/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
index 6a3b14f..c127627 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterId.java
@@ -81,7 +81,7 @@ public class ClusterId {
   /**
* @return A pb instance to represent this instance.
*/
-  ClusterIdProtos.ClusterId convert() {
+  public ClusterIdProtos.ClusterId convert() {
 ClusterIdProtos.ClusterId.Builder builder = ClusterIdProtos.ClusterId.newBuilder();
 return builder.setClusterId(this.id).build();
   }
@@ -90,7 +90,7 @@ public class ClusterId {
* @param cid
* @return A {@link ClusterId} made from the passed in cid
*/
-  static ClusterId convert(final ClusterIdProtos.ClusterId cid) {
+  public static ClusterId convert(final ClusterIdProtos.ClusterId cid) {
 return new ClusterId(cid.getClusterId());
   }
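
Widening both converters to public lets code outside org.apache.hadoop.hbase (for example the relocated ProtobufUtil helpers) translate a ClusterId to and from its protobuf form. A minimal sketch using exactly the signatures shown in this hunk:

  // Sketch only: PB round-trip with the now-public ClusterId converters.
  import org.apache.hadoop.hbase.ClusterId;
  import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos;

  public class ClusterIdRoundTrip {
    static ClusterId roundTrip(ClusterId id) {
      ClusterIdProtos.ClusterId pb = id.convert();  // instance convert(), now public
      return ClusterId.convert(pb);                 // static convert(...), now public
    }
  }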
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/97ad33c6/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
index 4b73dda..bc97a95 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
@@ -346,117 +346,4 @@ public class ClusterStatus extends VersionedWritable {
 }
 return sb.toString();
   }
-
-  /**
-* Convert a ClusterStatus to a protobuf ClusterStatus
-*
-* @return the protobuf ClusterStatus
-*/
-  public ClusterStatusProtos.ClusterStatus convert() {
-ClusterStatusProtos.ClusterStatus.Builder builder =
-ClusterStatusProtos.ClusterStatus.newBuilder();
-    builder.setHbaseVersion(HBaseVersionFileContent.newBuilder().setVersion(getHBaseVersion()));
-
-if (liveServers != null){
-  for (Map.Entry<ServerName, ServerLoad> entry : liveServers.entrySet()) {
-
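
The removal hunk above is cut off, but the diffstat shows where the logic went: ClusterStatus.java changes by 113 lines (the removals above) while ProtobufUtil.java grows by 128. A hedged sketch of the post-removal call site; the ProtobufUtil method name used here (convert) is an assumption, since the relocated signature is not shown in this excerpt:

  // Sketch only: converting ClusterStatus after its own convert() was removed.
  // ProtobufUtil.convert(...) is a hypothetical name for the relocated helper;
  // confirm the exact signature in hbase-client's ProtobufUtil.
  import org.apache.hadoop.hbase.ClusterStatus;
  import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
  import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos;

  public class ClusterStatusConversionSketch {
    static ClusterStatusProtos.ClusterStatus toProto(ClusterStatus status) {
      return ProtobufUtil.convert(status);  // hypothetical relocated helper
    }
  }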

svn commit: r13576 - in /dev/hbase/hbase-1.1.5RC0: hbase-1.1.5-bin.tar.gz hbase-1.1.5-src.tar.gz

2016-05-09 Thread ndimiduk
Author: ndimiduk
Date: Mon May  9 06:31:05 2016
New Revision: 13576

Log:
missing HBase-1.1.5 RC0 artifacts

Added:
dev/hbase/hbase-1.1.5RC0/hbase-1.1.5-bin.tar.gz   (with props)
dev/hbase/hbase-1.1.5RC0/hbase-1.1.5-src.tar.gz   (with props)

Added: dev/hbase/hbase-1.1.5RC0/hbase-1.1.5-bin.tar.gz
==
Binary file - no diff available.

Propchange: dev/hbase/hbase-1.1.5RC0/hbase-1.1.5-bin.tar.gz
--
svn:mime-type = application/octet-stream

Added: dev/hbase/hbase-1.1.5RC0/hbase-1.1.5-src.tar.gz
==
Binary file - no diff available.

Propchange: dev/hbase/hbase-1.1.5RC0/hbase-1.1.5-src.tar.gz
--
svn:mime-type = application/octet-stream