hbase git commit: HBASE-16093 Fix splits failed before creating daughter regions leave meta inconsistent

2016-07-12 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/0.98 7ad167875 -> 3a6698e87


HBASE-16093 Fix splits failed before creating daughter regions leave meta 
inconsistent

Amending-Author: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3a6698e8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3a6698e8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3a6698e8

Branch: refs/heads/0.98
Commit: 3a6698e878fba7340f506b536d74aa0a13ebe0a2
Parents: 7ad1678
Author: Elliott Clark 
Authored: Thu Jun 23 11:27:44 2016 -0700
Committer: Andrew Purtell 
Committed: Tue Jul 12 23:09:15 2016 -0700

--
 .../apache/hadoop/hbase/master/AssignmentManager.java | 14 --
 1 file changed, 12 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3a6698e8/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
index 48c92e0..475d9ae 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
@@ -3794,9 +3794,19 @@ public class AssignmentManager extends ZooKeeperListener 
{
 return "Failed to record the splitting in meta";
   }
 } else if (code == TransitionCode.SPLIT_REVERTED) {
+  // Always bring the parent back online. Even if it's not offline
+  // There's no harm in making it online again.
   regionOnline(p, sn);
-  regionOffline(a);
-  regionOffline(b);
+
+  // Only offline the region if they are known to exist.
+  RegionState regionStateA = regionStates.getRegionState(a);
+  RegionState regionStateB = regionStates.getRegionState(b);
+  if (regionStateA != null) {
+regionOffline(a);
+  }
+  if (regionStateB != null) {
+regionOffline(b);
+  }
 
   if (isTableDisabledOrDisabling(p.getTable())) {
 invokeUnAssign(p);



hbase git commit: HBASE-16195 Should not add chunk into chunkQueue if not using chunk pool in MemStoreLAB

2016-07-12 Thread liyu
Repository: hbase
Updated Branches:
  refs/heads/0.98 364fa3f7f -> 7ad167875


HBASE-16195 Should not add chunk into chunkQueue if not using chunk pool in 
MemStoreLAB


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7ad16787
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7ad16787
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7ad16787

Branch: refs/heads/0.98
Commit: 7ad167875e3bbef084939fef099106a45ae4ebf5
Parents: 364fa3f
Author: Yu Li 
Authored: Wed Jul 13 10:05:24 2016 +0800
Committer: Yu Li 
Committed: Wed Jul 13 10:05:24 2016 +0800

--
 .../hbase/regionserver/MemStoreChunkPool.java   | 17 +
 .../hadoop/hbase/regionserver/MemStoreLAB.java  | 28 +++-
 .../hbase/regionserver/TestMemStoreLAB.java | 76 +++-
 3 files changed, 117 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7ad16787/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java
index be03488..19dc956 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.regionserver.MemStoreLAB.Chunk;
 import org.apache.hadoop.util.StringUtils;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 /**
@@ -121,6 +122,13 @@ public class MemStoreChunkPool {
   return;
 }
 chunks.drainTo(reclaimedChunks, maxNumToPutback);
+// clear reference of any non-reclaimable chunks
+if (chunks.size() > 0) {
+  if (LOG.isTraceEnabled()) {
+LOG.trace("Left " + chunks.size() + " unreclaimable chunks, removing 
them from queue");
+  }
+  chunks.clear();
+}
   }
 
   /**
@@ -216,4 +224,13 @@ public class MemStoreChunkPool {
 return globalInstance;
   }
 
+  int getMaxCount() {
+return this.maxCount;
+  }
+
+  @VisibleForTesting
+  static void clearDisableFlag() {
+chunkPoolDisabled = false;
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/7ad16787/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java
index 4f776a6..43f58d8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java
@@ -24,6 +24,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 
@@ -53,9 +55,11 @@ import com.google.common.base.Preconditions;
  */
 @InterfaceAudience.Private
 public class MemStoreLAB {
+  static final Log LOG = LogFactory.getLog(MemStoreLAB.class);
+
   private AtomicReference curChunk = new AtomicReference();
-  // A queue of chunks contained by this memstore
-  private BlockingQueue chunkQueue = new LinkedBlockingQueue();
+  // A queue of chunks contained by this memstore, used with chunk pool
+  private BlockingQueue chunkQueue = null;
 
   final static String CHUNK_SIZE_KEY = 
"hbase.hregion.memstore.mslab.chunksize";
   final static int CHUNK_SIZE_DEFAULT = 2048 * 1024;
@@ -89,6 +93,12 @@ public class MemStoreLAB {
 chunkSize = conf.getInt(CHUNK_SIZE_KEY, CHUNK_SIZE_DEFAULT);
 maxAlloc = conf.getInt(MAX_ALLOC_KEY, MAX_ALLOC_DEFAULT);
 this.chunkPool = pool;
+// currently chunkQueue is only used for chunkPool
+if (this.chunkPool != null) {
+  // set queue length to chunk pool max count to avoid keeping reference of
+  // too many non-reclaimable chunks
+  chunkQueue = new LinkedBlockingQueue(chunkPool.getMaxCount());
+}
 
 // if we don't exclude allocations >CHUNK_SIZE, we'd infiniteloop on one!
 Preconditions.checkArgument(
@@ -164,6 +174,8 @@ public class MemStoreLAB {
* Try to retire the current chunk if it is still
* c. Postcondition is that curChunk.get()
* != c
+   * @param c the chunk to retire

hbase git commit: HBASE-16195 Should not add chunk into chunkQueue if not using chunk pool in HeapMemStoreLAB

2016-07-12 Thread liyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.1 67d057451 -> 4014c23ec


HBASE-16195 Should not add chunk into chunkQueue if not using chunk pool in 
HeapMemStoreLAB


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4014c23e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4014c23e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4014c23e

Branch: refs/heads/branch-1.1
Commit: 4014c23ecb14b27fdec452f98f262288e62f5657
Parents: 67d0574
Author: Yu Li 
Authored: Wed Jul 13 09:33:24 2016 +0800
Committer: Yu Li 
Committed: Wed Jul 13 09:35:40 2016 +0800

--
 .../hbase/regionserver/HeapMemStoreLAB.java | 28 +++-
 .../hbase/regionserver/MemStoreChunkPool.java   | 17 +
 .../hbase/regionserver/TestMemStoreLAB.java | 76 +++-
 3 files changed, 117 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4014c23e/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
index 625811a..d8fa5c3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
@@ -24,6 +24,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.util.ByteRange;
@@ -62,9 +64,11 @@ public class HeapMemStoreLAB implements MemStoreLAB {
   static final int MAX_ALLOC_DEFAULT = 256 * 1024; // allocs bigger than this 
don't go through
// allocator
 
+  static final Log LOG = LogFactory.getLog(HeapMemStoreLAB.class);
+
   private AtomicReference curChunk = new AtomicReference();
-  // A queue of chunks contained by this memstore
-  private BlockingQueue chunkQueue = new LinkedBlockingQueue();
+  // A queue of chunks contained by this memstore, used with chunk pool
+  private BlockingQueue chunkQueue = null;
   final int chunkSize;
   final int maxAlloc;
   private final MemStoreChunkPool chunkPool;
@@ -87,6 +91,12 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 chunkSize = conf.getInt(CHUNK_SIZE_KEY, CHUNK_SIZE_DEFAULT);
 maxAlloc = conf.getInt(MAX_ALLOC_KEY, MAX_ALLOC_DEFAULT);
 this.chunkPool = MemStoreChunkPool.getPool(conf);
+// currently chunkQueue is only used for chunkPool
+if (this.chunkPool != null) {
+  // set queue length to chunk pool max count to avoid keeping reference of
+  // too many non-reclaimable chunks
+  chunkQueue = new LinkedBlockingQueue(chunkPool.getMaxCount());
+}
 
 // if we don't exclude allocations >CHUNK_SIZE, we'd infiniteloop on one!
 Preconditions.checkArgument(
@@ -166,6 +176,8 @@ public class HeapMemStoreLAB implements MemStoreLAB {
* Try to retire the current chunk if it is still
* c. Postcondition is that curChunk.get()
* != c
+   * @param c the chunk to retire
+   * @return true if we won the race to retire the chunk
*/
   private void tryRetireChunk(Chunk c) {
 curChunk.compareAndSet(c, null);
@@ -197,7 +209,12 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 // we won race - now we need to actually do the expensive
 // allocation step
 c.init();
-this.chunkQueue.add(c);
+if (chunkQueue != null && !this.closed && !this.chunkQueue.offer(c)) {
+  if (LOG.isTraceEnabled()) {
+LOG.trace("Chunk queue is full, won't reuse this new chunk. 
Current queue size: "
++ chunkQueue.size());
+  }
+}
 return c;
   } else if (chunkPool != null) {
 chunkPool.putbackChunk(c);
@@ -212,6 +229,11 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 return this.curChunk.get();
   }
 
+  @VisibleForTesting
+  BlockingQueue getChunkQueue() {
+return this.chunkQueue;
+  }
+
   /**
* A chunk of memory out of which allocations are sliced.
*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/4014c23e/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java

hbase git commit: HBASE-16195 Should not add chunk into chunkQueue if not using chunk pool in HeapMemStoreLAB

2016-07-12 Thread liyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 244a6ad7d -> ab239afb6


HBASE-16195 Should not add chunk into chunkQueue if not using chunk pool in 
HeapMemStoreLAB


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ab239afb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ab239afb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ab239afb

Branch: refs/heads/branch-1.2
Commit: ab239afb67c8c5d7a3c359ec17d278b5f579f4a7
Parents: 244a6ad
Author: Yu Li 
Authored: Wed Jul 13 09:33:24 2016 +0800
Committer: Yu Li 
Committed: Wed Jul 13 09:35:15 2016 +0800

--
 .../hbase/regionserver/HeapMemStoreLAB.java | 28 +++-
 .../hbase/regionserver/MemStoreChunkPool.java   | 17 +
 .../hbase/regionserver/TestMemStoreLAB.java | 76 +++-
 3 files changed, 117 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ab239afb/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
index 625811a..d8fa5c3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
@@ -24,6 +24,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.util.ByteRange;
@@ -62,9 +64,11 @@ public class HeapMemStoreLAB implements MemStoreLAB {
   static final int MAX_ALLOC_DEFAULT = 256 * 1024; // allocs bigger than this 
don't go through
// allocator
 
+  static final Log LOG = LogFactory.getLog(HeapMemStoreLAB.class);
+
   private AtomicReference curChunk = new AtomicReference();
-  // A queue of chunks contained by this memstore
-  private BlockingQueue chunkQueue = new LinkedBlockingQueue();
+  // A queue of chunks contained by this memstore, used with chunk pool
+  private BlockingQueue chunkQueue = null;
   final int chunkSize;
   final int maxAlloc;
   private final MemStoreChunkPool chunkPool;
@@ -87,6 +91,12 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 chunkSize = conf.getInt(CHUNK_SIZE_KEY, CHUNK_SIZE_DEFAULT);
 maxAlloc = conf.getInt(MAX_ALLOC_KEY, MAX_ALLOC_DEFAULT);
 this.chunkPool = MemStoreChunkPool.getPool(conf);
+// currently chunkQueue is only used for chunkPool
+if (this.chunkPool != null) {
+  // set queue length to chunk pool max count to avoid keeping reference of
+  // too many non-reclaimable chunks
+  chunkQueue = new LinkedBlockingQueue(chunkPool.getMaxCount());
+}
 
 // if we don't exclude allocations >CHUNK_SIZE, we'd infiniteloop on one!
 Preconditions.checkArgument(
@@ -166,6 +176,8 @@ public class HeapMemStoreLAB implements MemStoreLAB {
* Try to retire the current chunk if it is still
* c. Postcondition is that curChunk.get()
* != c
+   * @param c the chunk to retire
+   * @return true if we won the race to retire the chunk
*/
   private void tryRetireChunk(Chunk c) {
 curChunk.compareAndSet(c, null);
@@ -197,7 +209,12 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 // we won race - now we need to actually do the expensive
 // allocation step
 c.init();
-this.chunkQueue.add(c);
+if (chunkQueue != null && !this.closed && !this.chunkQueue.offer(c)) {
+  if (LOG.isTraceEnabled()) {
+LOG.trace("Chunk queue is full, won't reuse this new chunk. 
Current queue size: "
++ chunkQueue.size());
+  }
+}
 return c;
   } else if (chunkPool != null) {
 chunkPool.putbackChunk(c);
@@ -212,6 +229,11 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 return this.curChunk.get();
   }
 
+  @VisibleForTesting
+  BlockingQueue getChunkQueue() {
+return this.chunkQueue;
+  }
+
   /**
* A chunk of memory out of which allocations are sliced.
*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/ab239afb/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java

hbase git commit: HBASE-16195 Should not add chunk into chunkQueue if not using chunk pool in HeapMemStoreLAB

2016-07-12 Thread liyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 5bd5f6446 -> 922dc33fd


HBASE-16195 Should not add chunk into chunkQueue if not using chunk pool in 
HeapMemStoreLAB


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/922dc33f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/922dc33f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/922dc33f

Branch: refs/heads/branch-1.3
Commit: 922dc33fd8146312b5b7f4428bd53ee4a087ae50
Parents: 5bd5f64
Author: Yu Li 
Authored: Wed Jul 13 09:33:24 2016 +0800
Committer: Yu Li 
Committed: Wed Jul 13 09:34:52 2016 +0800

--
 .../hbase/regionserver/HeapMemStoreLAB.java | 28 +++-
 .../hbase/regionserver/MemStoreChunkPool.java   | 17 +
 .../hbase/regionserver/TestMemStoreLAB.java | 76 +++-
 3 files changed, 117 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/922dc33f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
index 625811a..d8fa5c3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
@@ -24,6 +24,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.util.ByteRange;
@@ -62,9 +64,11 @@ public class HeapMemStoreLAB implements MemStoreLAB {
   static final int MAX_ALLOC_DEFAULT = 256 * 1024; // allocs bigger than this 
don't go through
// allocator
 
+  static final Log LOG = LogFactory.getLog(HeapMemStoreLAB.class);
+
   private AtomicReference curChunk = new AtomicReference();
-  // A queue of chunks contained by this memstore
-  private BlockingQueue chunkQueue = new LinkedBlockingQueue();
+  // A queue of chunks contained by this memstore, used with chunk pool
+  private BlockingQueue chunkQueue = null;
   final int chunkSize;
   final int maxAlloc;
   private final MemStoreChunkPool chunkPool;
@@ -87,6 +91,12 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 chunkSize = conf.getInt(CHUNK_SIZE_KEY, CHUNK_SIZE_DEFAULT);
 maxAlloc = conf.getInt(MAX_ALLOC_KEY, MAX_ALLOC_DEFAULT);
 this.chunkPool = MemStoreChunkPool.getPool(conf);
+// currently chunkQueue is only used for chunkPool
+if (this.chunkPool != null) {
+  // set queue length to chunk pool max count to avoid keeping reference of
+  // too many non-reclaimable chunks
+  chunkQueue = new LinkedBlockingQueue(chunkPool.getMaxCount());
+}
 
 // if we don't exclude allocations >CHUNK_SIZE, we'd infiniteloop on one!
 Preconditions.checkArgument(
@@ -166,6 +176,8 @@ public class HeapMemStoreLAB implements MemStoreLAB {
* Try to retire the current chunk if it is still
* c. Postcondition is that curChunk.get()
* != c
+   * @param c the chunk to retire
+   * @return true if we won the race to retire the chunk
*/
   private void tryRetireChunk(Chunk c) {
 curChunk.compareAndSet(c, null);
@@ -197,7 +209,12 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 // we won race - now we need to actually do the expensive
 // allocation step
 c.init();
-this.chunkQueue.add(c);
+if (chunkQueue != null && !this.closed && !this.chunkQueue.offer(c)) {
+  if (LOG.isTraceEnabled()) {
+LOG.trace("Chunk queue is full, won't reuse this new chunk. 
Current queue size: "
++ chunkQueue.size());
+  }
+}
 return c;
   } else if (chunkPool != null) {
 chunkPool.putbackChunk(c);
@@ -212,6 +229,11 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 return this.curChunk.get();
   }
 
+  @VisibleForTesting
+  BlockingQueue getChunkQueue() {
+return this.chunkQueue;
+  }
+
   /**
* A chunk of memory out of which allocations are sliced.
*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/922dc33f/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java

hbase git commit: HBASE-16195 Should not add chunk into chunkQueue if not using chunk pool in HeapMemStoreLAB

2016-07-12 Thread liyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 7c4c51f2c -> 95d141f38


HBASE-16195 Should not add chunk into chunkQueue if not using chunk pool in 
HeapMemStoreLAB


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/95d141f3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/95d141f3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/95d141f3

Branch: refs/heads/branch-1
Commit: 95d141f3828c77f8ffae57e8ee4e9223cf1f43a1
Parents: 7c4c51f
Author: Yu Li 
Authored: Wed Jul 13 09:33:24 2016 +0800
Committer: Yu Li 
Committed: Wed Jul 13 09:34:34 2016 +0800

--
 .../hbase/regionserver/HeapMemStoreLAB.java | 28 +++-
 .../hbase/regionserver/MemStoreChunkPool.java   | 17 +
 .../hbase/regionserver/TestMemStoreLAB.java | 76 +++-
 3 files changed, 117 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/95d141f3/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
index 625811a..d8fa5c3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
@@ -24,6 +24,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.util.ByteRange;
@@ -62,9 +64,11 @@ public class HeapMemStoreLAB implements MemStoreLAB {
   static final int MAX_ALLOC_DEFAULT = 256 * 1024; // allocs bigger than this 
don't go through
// allocator
 
+  static final Log LOG = LogFactory.getLog(HeapMemStoreLAB.class);
+
   private AtomicReference curChunk = new AtomicReference();
-  // A queue of chunks contained by this memstore
-  private BlockingQueue chunkQueue = new LinkedBlockingQueue();
+  // A queue of chunks contained by this memstore, used with chunk pool
+  private BlockingQueue chunkQueue = null;
   final int chunkSize;
   final int maxAlloc;
   private final MemStoreChunkPool chunkPool;
@@ -87,6 +91,12 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 chunkSize = conf.getInt(CHUNK_SIZE_KEY, CHUNK_SIZE_DEFAULT);
 maxAlloc = conf.getInt(MAX_ALLOC_KEY, MAX_ALLOC_DEFAULT);
 this.chunkPool = MemStoreChunkPool.getPool(conf);
+// currently chunkQueue is only used for chunkPool
+if (this.chunkPool != null) {
+  // set queue length to chunk pool max count to avoid keeping reference of
+  // too many non-reclaimable chunks
+  chunkQueue = new LinkedBlockingQueue(chunkPool.getMaxCount());
+}
 
 // if we don't exclude allocations >CHUNK_SIZE, we'd infiniteloop on one!
 Preconditions.checkArgument(
@@ -166,6 +176,8 @@ public class HeapMemStoreLAB implements MemStoreLAB {
* Try to retire the current chunk if it is still
* c. Postcondition is that curChunk.get()
* != c
+   * @param c the chunk to retire
+   * @return true if we won the race to retire the chunk
*/
   private void tryRetireChunk(Chunk c) {
 curChunk.compareAndSet(c, null);
@@ -197,7 +209,12 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 // we won race - now we need to actually do the expensive
 // allocation step
 c.init();
-this.chunkQueue.add(c);
+if (chunkQueue != null && !this.closed && !this.chunkQueue.offer(c)) {
+  if (LOG.isTraceEnabled()) {
+LOG.trace("Chunk queue is full, won't reuse this new chunk. 
Current queue size: "
++ chunkQueue.size());
+  }
+}
 return c;
   } else if (chunkPool != null) {
 chunkPool.putbackChunk(c);
@@ -212,6 +229,11 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 return this.curChunk.get();
   }
 
+  @VisibleForTesting
+  BlockingQueue getChunkQueue() {
+return this.chunkQueue;
+  }
+
   /**
* A chunk of memory out of which allocations are sliced.
*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/95d141f3/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java
 

hbase git commit: HBASE-16195 Should not add chunk into chunkQueue if not using chunk pool in HeapMemStoreLAB

2016-07-12 Thread liyu
Repository: hbase
Updated Branches:
  refs/heads/master 911706a87 -> 3b3c3dc02


HBASE-16195 Should not add chunk into chunkQueue if not using chunk pool in 
HeapMemStoreLAB


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3b3c3dc0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3b3c3dc0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3b3c3dc0

Branch: refs/heads/master
Commit: 3b3c3dc02deaeb3c2c8ca52dd204edbf87da502f
Parents: 911706a
Author: Yu Li 
Authored: Wed Jul 13 09:33:24 2016 +0800
Committer: Yu Li 
Committed: Wed Jul 13 09:33:24 2016 +0800

--
 .../hbase/regionserver/HeapMemStoreLAB.java | 28 +++-
 .../hbase/regionserver/MemStoreChunkPool.java   | 17 +
 .../hbase/regionserver/TestMemStoreLAB.java | 76 +++-
 3 files changed, 117 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3b3c3dc0/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
index 625811a..d8fa5c3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.java
@@ -24,6 +24,8 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.util.ByteRange;
@@ -62,9 +64,11 @@ public class HeapMemStoreLAB implements MemStoreLAB {
   static final int MAX_ALLOC_DEFAULT = 256 * 1024; // allocs bigger than this 
don't go through
// allocator
 
+  static final Log LOG = LogFactory.getLog(HeapMemStoreLAB.class);
+
   private AtomicReference curChunk = new AtomicReference();
-  // A queue of chunks contained by this memstore
-  private BlockingQueue chunkQueue = new LinkedBlockingQueue();
+  // A queue of chunks contained by this memstore, used with chunk pool
+  private BlockingQueue chunkQueue = null;
   final int chunkSize;
   final int maxAlloc;
   private final MemStoreChunkPool chunkPool;
@@ -87,6 +91,12 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 chunkSize = conf.getInt(CHUNK_SIZE_KEY, CHUNK_SIZE_DEFAULT);
 maxAlloc = conf.getInt(MAX_ALLOC_KEY, MAX_ALLOC_DEFAULT);
 this.chunkPool = MemStoreChunkPool.getPool(conf);
+// currently chunkQueue is only used for chunkPool
+if (this.chunkPool != null) {
+  // set queue length to chunk pool max count to avoid keeping reference of
+  // too many non-reclaimable chunks
+  chunkQueue = new LinkedBlockingQueue(chunkPool.getMaxCount());
+}
 
 // if we don't exclude allocations >CHUNK_SIZE, we'd infiniteloop on one!
 Preconditions.checkArgument(
@@ -166,6 +176,8 @@ public class HeapMemStoreLAB implements MemStoreLAB {
* Try to retire the current chunk if it is still
* c. Postcondition is that curChunk.get()
* != c
+   * @param c the chunk to retire
+   * @return true if we won the race to retire the chunk
*/
   private void tryRetireChunk(Chunk c) {
 curChunk.compareAndSet(c, null);
@@ -197,7 +209,12 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 // we won race - now we need to actually do the expensive
 // allocation step
 c.init();
-this.chunkQueue.add(c);
+if (chunkQueue != null && !this.closed && !this.chunkQueue.offer(c)) {
+  if (LOG.isTraceEnabled()) {
+LOG.trace("Chunk queue is full, won't reuse this new chunk. 
Current queue size: "
++ chunkQueue.size());
+  }
+}
 return c;
   } else if (chunkPool != null) {
 chunkPool.putbackChunk(c);
@@ -212,6 +229,11 @@ public class HeapMemStoreLAB implements MemStoreLAB {
 return this.curChunk.get();
   }
 
+  @VisibleForTesting
+  BlockingQueue getChunkQueue() {
+return this.chunkQueue;
+  }
+
   /**
* A chunk of memory out of which allocations are sliced.
*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/3b3c3dc0/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreChunkPool.java

[2/3] hbase git commit: HBASE-16220 Demote log level for "HRegionFileSystem - No StoreFiles for" messages to TRACE

2016-07-12 Thread apurtell
HBASE-16220 Demote log level for "HRegionFileSystem - No StoreFiles for" 
messages to TRACE


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7c4c51f2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7c4c51f2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7c4c51f2

Branch: refs/heads/branch-1
Commit: 7c4c51f2c712a290fa3b0bd8cade193026069567
Parents: 16be7bb
Author: Andrew Purtell 
Authored: Tue Jul 12 15:44:06 2016 -0700
Committer: Andrew Purtell 
Committed: Tue Jul 12 17:00:09 2016 -0700

--
 .../hbase/regionserver/HRegionFileSystem.java   | 16 
 1 file changed, 12 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7c4c51f2/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
index 397ae7b..24b485d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
@@ -198,7 +198,9 @@ public class HRegionFileSystem {
 Path familyDir = getStoreDir(familyName);
 FileStatus[] files = FSUtils.listStatus(this.fs, familyDir);
 if (files == null) {
-  LOG.debug("No StoreFiles for: " + familyDir);
+  if (LOG.isTraceEnabled()) {
+LOG.trace("No StoreFiles for: " + familyDir);
+  }
   return null;
 }
 
@@ -378,7 +380,9 @@ public class HRegionFileSystem {
 if (!fs.exists(buildPath)) {
   throw new FileNotFoundException(buildPath.toString());
 }
-LOG.debug("Committing store file " + buildPath + " as " + dstPath);
+if (LOG.isDebugEnabled()) {
+  LOG.debug("Committing store file " + buildPath + " as " + dstPath);
+}
 // buildPath exists, therefore not doing an exists() check.
 if (!rename(buildPath, dstPath)) {
   throw new IOException("Failed rename of " + buildPath + " to " + 
dstPath);
@@ -1083,10 +1087,14 @@ public class HRegionFileSystem {
   private static void sleepBeforeRetry(String msg, int sleepMultiplier, int 
baseSleepBeforeRetries,
   int hdfsClientRetriesNumber) throws InterruptedException {
 if (sleepMultiplier > hdfsClientRetriesNumber) {
-  LOG.debug(msg + ", retries exhausted");
+  if (LOG.isDebugEnabled()) {
+LOG.debug(msg + ", retries exhausted");
+  }
   return;
 }
-LOG.debug(msg + ", sleeping " + baseSleepBeforeRetries + " times " + 
sleepMultiplier);
+if (LOG.isDebugEnabled()) {
+  LOG.debug(msg + ", sleeping " + baseSleepBeforeRetries + " times " + 
sleepMultiplier);
+}
 Thread.sleep((long)baseSleepBeforeRetries * sleepMultiplier);
   }
 }



[1/3] hbase git commit: HBASE-16220 Demote log level for "HRegionFileSystem - No StoreFiles for" messages to TRACE

2016-07-12 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/0.98 a16fb9ee0 -> 364fa3f7f
  refs/heads/branch-1 16be7bba5 -> 7c4c51f2c
  refs/heads/master f292048ff -> 911706a87


HBASE-16220 Demote log level for "HRegionFileSystem - No StoreFiles for" 
messages to TRACE


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/911706a8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/911706a8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/911706a8

Branch: refs/heads/master
Commit: 911706a8732262b4ce0e060900b76f84f5fdf11b
Parents: f292048
Author: Andrew Purtell 
Authored: Tue Jul 12 15:44:06 2016 -0700
Committer: Andrew Purtell 
Committed: Tue Jul 12 15:44:06 2016 -0700

--
 .../hbase/regionserver/HRegionFileSystem.java   | 16 
 1 file changed, 12 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/911706a8/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
index 74ff546..995b111 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
@@ -199,7 +199,9 @@ public class HRegionFileSystem {
 Path familyDir = getStoreDir(familyName);
 FileStatus[] files = FSUtils.listStatus(this.fs, familyDir);
 if (files == null) {
-  LOG.debug("No StoreFiles for: " + familyDir);
+  if (LOG.isTraceEnabled()) {
+LOG.trace("No StoreFiles for: " + familyDir);
+  }
   return null;
 }
 
@@ -379,7 +381,9 @@ public class HRegionFileSystem {
 if (!fs.exists(buildPath)) {
   throw new FileNotFoundException(buildPath.toString());
 }
-LOG.debug("Committing store file " + buildPath + " as " + dstPath);
+if (LOG.isDebugEnabled()) {
+  LOG.debug("Committing store file " + buildPath + " as " + dstPath);
+}
 // buildPath exists, therefore not doing an exists() check.
 if (!rename(buildPath, dstPath)) {
   throw new IOException("Failed rename of " + buildPath + " to " + 
dstPath);
@@ -1082,10 +1086,14 @@ public class HRegionFileSystem {
   private static void sleepBeforeRetry(String msg, int sleepMultiplier, int 
baseSleepBeforeRetries,
   int hdfsClientRetriesNumber) throws InterruptedException {
 if (sleepMultiplier > hdfsClientRetriesNumber) {
-  LOG.debug(msg + ", retries exhausted");
+  if (LOG.isDebugEnabled()) {
+LOG.debug(msg + ", retries exhausted");
+  }
   return;
 }
-LOG.debug(msg + ", sleeping " + baseSleepBeforeRetries + " times " + 
sleepMultiplier);
+if (LOG.isDebugEnabled()) {
+  LOG.debug(msg + ", sleeping " + baseSleepBeforeRetries + " times " + 
sleepMultiplier);
+}
 Thread.sleep((long)baseSleepBeforeRetries * sleepMultiplier);
   }
 }



[3/3] hbase git commit: HBASE-16220 Demote log level for "HRegionFileSystem - No StoreFiles for" messages to TRACE

2016-07-12 Thread apurtell
HBASE-16220 Demote log level for "HRegionFileSystem - No StoreFiles for" 
messages to TRACE


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/364fa3f7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/364fa3f7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/364fa3f7

Branch: refs/heads/0.98
Commit: 364fa3f7f5f90dd7ac87b1c48234bfa4fd4cd5b9
Parents: a16fb9e
Author: Andrew Purtell 
Authored: Tue Jul 12 15:44:06 2016 -0700
Committer: Andrew Purtell 
Committed: Tue Jul 12 17:01:16 2016 -0700

--
 .../hbase/regionserver/HRegionFileSystem.java   | 16 
 1 file changed, 12 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/364fa3f7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
index 578bbfc..828e815 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionFileSystem.java
@@ -189,7 +189,9 @@ public class HRegionFileSystem {
 Path familyDir = getStoreDir(familyName);
 FileStatus[] files = FSUtils.listStatus(this.fs, familyDir);
 if (files == null) {
-  LOG.debug("No StoreFiles for: " + familyDir);
+  if (LOG.isTraceEnabled()) {
+LOG.trace("No StoreFiles for: " + familyDir);
+  }
   return null;
 }
 
@@ -367,7 +369,9 @@ public class HRegionFileSystem {
 if (!fs.exists(buildPath)) {
   throw new FileNotFoundException(buildPath.toString());
 }
-LOG.debug("Committing store file " + buildPath + " as " + dstPath);
+if (LOG.isDebugEnabled()) {
+  LOG.debug("Committing store file " + buildPath + " as " + dstPath);
+}
 // buildPath exists, therefore not doing an exists() check.
 if (!rename(buildPath, dstPath)) {
   throw new IOException("Failed rename of " + buildPath + " to " + 
dstPath);
@@ -1048,10 +1052,14 @@ public class HRegionFileSystem {
   private static void sleepBeforeRetry(String msg, int sleepMultiplier, int 
baseSleepBeforeRetries,
   int hdfsClientRetriesNumber) {
 if (sleepMultiplier > hdfsClientRetriesNumber) {
-  LOG.debug(msg + ", retries exhausted");
+  if (LOG.isDebugEnabled()) {
+LOG.debug(msg + ", retries exhausted");
+  }
   return;
 }
-LOG.debug(msg + ", sleeping " + baseSleepBeforeRetries + " times " + 
sleepMultiplier);
+if (LOG.isDebugEnabled()) {
+  LOG.debug(msg + ", sleeping " + baseSleepBeforeRetries + " times " + 
sleepMultiplier);
+}
 Threads.sleep((long)baseSleepBeforeRetries * sleepMultiplier);
   }
 }



hbase git commit: HBASE-16208 Check that the row exists before attempting to remove a queue from TableBasedReplicationQueuesImpl

2016-07-12 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/master 6d94925af -> f292048ff


HBASE-16208 Check that the row exists before attempting to remove a queue from 
TableBasedReplicationQueuesImpl

Signed-off-by: Elliott Clark 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f292048f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f292048f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f292048f

Branch: refs/heads/master
Commit: f292048ffd91503360cb56bb9b5f60fd47e2ebe0
Parents: 6d94925
Author: Joseph Hwang 
Authored: Mon Jul 11 10:45:53 2016 -0700
Committer: Elliott Clark 
Committed: Tue Jul 12 14:50:07 2016 -0700

--
 .../replication/TableBasedReplicationQueuesImpl.java  | 10 +++---
 .../hbase/replication/TestReplicationStateHBaseImpl.java  |  7 ++-
 2 files changed, 13 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f292048f/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
index 28fa967..3ee6fde 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
@@ -93,11 +93,15 @@ public class TableBasedReplicationQueuesImpl extends 
ReplicationTableBase
 
   @Override
   public void removeQueue(String queueId) {
-
 try {
   byte[] rowKey = queueIdToRowKey(queueId);
-  Delete deleteQueue = new Delete(rowKey);
-  safeQueueUpdate(deleteQueue);
+  if (checkQueueExists(queueId)) {
+Delete deleteQueue = new Delete(rowKey);
+safeQueueUpdate(deleteQueue);
+  } else {
+LOG.info("No logs were registered for queue id=" + queueId + " so no 
rows were removed " +
+"from the replication table while removing the queue");
+  }
 } catch (IOException | ReplicationException e) {
   String errMsg = "Failed removing queue queueId=" + queueId;
   abortable.abort(errMsg, e);

http://git-wip-us.apache.org/repos/asf/hbase/blob/f292048f/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateHBaseImpl.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateHBaseImpl.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateHBaseImpl.java
index ccae8a5..7ec6df8 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateHBaseImpl.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationStateHBaseImpl.java
@@ -84,7 +84,7 @@ public class TestReplicationStateHBaseImpl {
 conf.setClass("hbase.region.replica.replication.replicationQueues.class",
   TableBasedReplicationQueuesImpl.class, ReplicationQueues.class);
 
conf.setClass("hbase.region.replica.replication.replicationQueuesClient.class",
-  TableBasedReplicationQueuesClientImpl.class, 
ReplicationQueuesClient.class);
+TableBasedReplicationQueuesClientImpl.class, 
ReplicationQueuesClient.class);
 utility.startMiniCluster();
 zkw = HBaseTestingUtility.getZooKeeperWatcher(utility);
 String replicationZNodeName = conf.get("zookeeper.znode.replication", 
"replication");
@@ -195,6 +195,11 @@ public class TestReplicationStateHBaseImpl {
   assertNull(rq1.getLogsInQueue("Queue1"));
   // Test that getting logs from a non-existent queue aborts
   assertEquals(6, ds1.getAbortCount());
+  // Test removing a non-existent queue does not cause an abort. This is 
because we can
+  // attempt to remove a queue that has no corresponding Replication Table 
row (if we never
+  // registered a WAL for it)
+  rq1.removeQueue("NotHereQueue");
+  assertEquals(6, ds1.getAbortCount());
 } catch (ReplicationException e) {
   e.printStackTrace();
   fail("testAddLog received a ReplicationException");



[2/3] hbase git commit: HBASE-16211 JMXCacheBuster restarting the metrics system might cause tests to hang

2016-07-12 Thread enis
HBASE-16211 JMXCacheBuster restarting the metrics system might cause tests to 
hang


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/16be7bba
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/16be7bba
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/16be7bba

Branch: refs/heads/branch-1
Commit: 16be7bba53165240b3bb90fcf30126a91052ac95
Parents: 1765988
Author: Enis Soztutar 
Authored: Tue Jul 12 13:43:52 2016 -0700
Committer: Enis Soztutar 
Committed: Tue Jul 12 13:43:58 2016 -0700

--
 .../hadoop/metrics2/impl/JmxCacheBuster.java| 31 
 1 file changed, 31 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/16be7bba/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
index 8fcf623..1ae9bd4 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.metrics2.impl;
 
 import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.commons.logging.Log;
@@ -27,6 +28,9 @@ import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsExecutor;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl;
+import org.apache.hadoop.util.StringUtils;
+
+import com.google.common.annotations.VisibleForTesting;
 
 /**
  * JMX caches the beans that have been exported; even after the values are 
removed from hadoop's
@@ -41,6 +45,7 @@ public class JmxCacheBuster {
   private static final Log LOG = LogFactory.getLog(JmxCacheBuster.class);
   private static AtomicReference fut = new 
AtomicReference<>(null);
   private static MetricsExecutor executor = new MetricsExecutorImpl();
+  private static AtomicBoolean stopped = new AtomicBoolean(false);
 
   private JmxCacheBuster() {
 // Static only cache.
@@ -50,16 +55,42 @@ public class JmxCacheBuster {
* For JMX to forget about all previously exported metrics.
*/
   public static void clearJmxCache() {
+if (LOG.isTraceEnabled()) {
+  LOG.trace("clearing JMX Cache" + StringUtils.stringifyException(new 
Exception()));
+}
 //If there are more then 100 ms before the executor will run then 
everything should be merged.
 ScheduledFuture future = fut.get();
 if ((future != null && (!future.isDone() && 
future.getDelay(TimeUnit.MILLISECONDS) > 100))) {
   // BAIL OUT
   return;
 }
+if (stopped.get()) {
+  return;
+}
 future = executor.getExecutor().schedule(new JmxCacheBusterRunnable(), 5, 
TimeUnit.SECONDS);
 fut.set(future);
   }
 
+  /**
+   * Stops the clearing of JMX metrics and restarting the Hadoop metrics 
system. This is needed for
+   * some test environments where we manually inject sources or sinks 
dynamically.
+   */
+  @VisibleForTesting
+  public static void stop() {
+stopped.set(true);
+ScheduledFuture future = fut.get();
+future.cancel(false);
+  }
+
+  /**
+   * Restarts the stopped service.
+   * @see #stop()
+   */
+  @VisibleForTesting
+  public static void restart() {
+stopped.set(false);
+  }
+
   final static class JmxCacheBusterRunnable implements Runnable {
 @Override
 public void run() {



[3/3] hbase git commit: HBASE-16211 JMXCacheBuster restarting the metrics system might cause tests to hang

2016-07-12 Thread enis
HBASE-16211 JMXCacheBuster restarting the metrics system might cause tests to 
hang


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5bd5f644
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5bd5f644
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5bd5f644

Branch: refs/heads/branch-1.3
Commit: 5bd5f6446660932c9c2ea01d8bd48d1f012b225b
Parents: 51eee9a
Author: Enis Soztutar 
Authored: Tue Jul 12 13:43:52 2016 -0700
Committer: Enis Soztutar 
Committed: Tue Jul 12 13:45:32 2016 -0700

--
 .../hadoop/metrics2/impl/JmxCacheBuster.java| 31 
 1 file changed, 31 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5bd5f644/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
index 8fcf623..1ae9bd4 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.metrics2.impl;
 
 import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.commons.logging.Log;
@@ -27,6 +28,9 @@ import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsExecutor;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl;
+import org.apache.hadoop.util.StringUtils;
+
+import com.google.common.annotations.VisibleForTesting;
 
 /**
  * JMX caches the beans that have been exported; even after the values are 
removed from hadoop's
@@ -41,6 +45,7 @@ public class JmxCacheBuster {
   private static final Log LOG = LogFactory.getLog(JmxCacheBuster.class);
   private static AtomicReference fut = new 
AtomicReference<>(null);
   private static MetricsExecutor executor = new MetricsExecutorImpl();
+  private static AtomicBoolean stopped = new AtomicBoolean(false);
 
   private JmxCacheBuster() {
 // Static only cache.
@@ -50,16 +55,42 @@ public class JmxCacheBuster {
* For JMX to forget about all previously exported metrics.
*/
   public static void clearJmxCache() {
+if (LOG.isTraceEnabled()) {
+  LOG.trace("clearing JMX Cache" + StringUtils.stringifyException(new 
Exception()));
+}
 //If there are more then 100 ms before the executor will run then 
everything should be merged.
 ScheduledFuture future = fut.get();
 if ((future != null && (!future.isDone() && 
future.getDelay(TimeUnit.MILLISECONDS) > 100))) {
   // BAIL OUT
   return;
 }
+if (stopped.get()) {
+  return;
+}
 future = executor.getExecutor().schedule(new JmxCacheBusterRunnable(), 5, 
TimeUnit.SECONDS);
 fut.set(future);
   }
 
+  /**
+   * Stops the clearing of JMX metrics and restarting the Hadoop metrics 
system. This is needed for
+   * some test environments where we manually inject sources or sinks 
dynamically.
+   */
+  @VisibleForTesting
+  public static void stop() {
+stopped.set(true);
+ScheduledFuture future = fut.get();
+future.cancel(false);
+  }
+
+  /**
+   * Restarts the stopped service.
+   * @see #stop()
+   */
+  @VisibleForTesting
+  public static void restart() {
+stopped.set(false);
+  }
+
   final static class JmxCacheBusterRunnable implements Runnable {
 @Override
 public void run() {



[1/3] hbase git commit: HBASE-16211 JMXCacheBuster restarting the metrics system might cause tests to hang

2016-07-12 Thread enis
Repository: hbase
Updated Branches:
  refs/heads/branch-1 176598860 -> 16be7bba5
  refs/heads/branch-1.3 51eee9a22 -> 5bd5f6446
  refs/heads/master 90ba723dc -> 6d94925af


HBASE-16211 JMXCacheBuster restarting the metrics system might cause tests to 
hang


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6d94925a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6d94925a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6d94925a

Branch: refs/heads/master
Commit: 6d94925af9d2a3eb2f181d0aa1a875becf258fc8
Parents: 90ba723
Author: Enis Soztutar 
Authored: Tue Jul 12 13:43:52 2016 -0700
Committer: Enis Soztutar 
Committed: Tue Jul 12 13:43:52 2016 -0700

--
 .../hadoop/metrics2/impl/JmxCacheBuster.java| 31 
 1 file changed, 31 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6d94925a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
index 8fcf623..1ae9bd4 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
+++ 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/metrics2/impl/JmxCacheBuster.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.metrics2.impl;
 
 import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.commons.logging.Log;
@@ -27,6 +28,9 @@ import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsExecutor;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl;
+import org.apache.hadoop.util.StringUtils;
+
+import com.google.common.annotations.VisibleForTesting;
 
 /**
  * JMX caches the beans that have been exported; even after the values are 
removed from hadoop's
@@ -41,6 +45,7 @@ public class JmxCacheBuster {
   private static final Log LOG = LogFactory.getLog(JmxCacheBuster.class);
   private static AtomicReference fut = new 
AtomicReference<>(null);
   private static MetricsExecutor executor = new MetricsExecutorImpl();
+  private static AtomicBoolean stopped = new AtomicBoolean(false);
 
   private JmxCacheBuster() {
 // Static only cache.
@@ -50,16 +55,42 @@ public class JmxCacheBuster {
* For JMX to forget about all previously exported metrics.
*/
   public static void clearJmxCache() {
+if (LOG.isTraceEnabled()) {
+  LOG.trace("clearing JMX Cache" + StringUtils.stringifyException(new 
Exception()));
+}
 //If there are more then 100 ms before the executor will run then 
everything should be merged.
 ScheduledFuture future = fut.get();
 if ((future != null && (!future.isDone() && 
future.getDelay(TimeUnit.MILLISECONDS) > 100))) {
   // BAIL OUT
   return;
 }
+if (stopped.get()) {
+  return;
+}
 future = executor.getExecutor().schedule(new JmxCacheBusterRunnable(), 5, 
TimeUnit.SECONDS);
 fut.set(future);
   }
 
+  /**
+   * Stops the clearing of JMX metrics and restarting the Hadoop metrics 
system. This is needed for
+   * some test environments where we manually inject sources or sinks 
dynamically.
+   */
+  @VisibleForTesting
+  public static void stop() {
+stopped.set(true);
+ScheduledFuture future = fut.get();
+future.cancel(false);
+  }
+
+  /**
+   * Restarts the stopped service.
+   * @see #stop()
+   */
+  @VisibleForTesting
+  public static void restart() {
+stopped.set(false);
+  }
+
   final static class JmxCacheBusterRunnable implements Runnable {
 @Override
 public void run() {



hbase git commit: HBASE-16214 Add in a UI description for the Replication Table

2016-07-12 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/master 8cfaa0e72 -> 90ba723dc


HBASE-16214 Add in a UI description for the Replication Table

Signed-off-by: Elliott Clark 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/90ba723d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/90ba723d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/90ba723d

Branch: refs/heads/master
Commit: 90ba723dc63571cef14546e9988a76f0adb923aa
Parents: 8cfaa0e
Author: Joseph Hwang 
Authored: Tue Jul 12 09:45:05 2016 -0700
Committer: Elliott Clark 
Committed: Tue Jul 12 12:21:55 2016 -0700

--
 .../apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon| 7 +--
 1 file changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/90ba723d/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
index 056c2d7..d19df21 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
@@ -387,7 +387,7 @@ AssignmentManager assignmentManager = 
master.getAssignmentManager();
 description = "The hbase:canary table is used to sniff the write 
availbility of"
   + " each regionserver.";
 } else if (tableName.equals(AccessControlLists.ACL_TABLE_NAME)){
-description = "The hbase:acl table holds information about acl";
+description = "The hbase:acl table holds information about acl.";
 } else if (tableName.equals(VisibilityConstants.LABELS_TABLE_NAME)){
 description = "The hbase:labels table holds information about 
visibility labels.";
 } else if (tableName.equals(TableName.NAMESPACE_TABLE_NAME)){
@@ -396,7 +396,10 @@ AssignmentManager assignmentManager = 
master.getAssignmentManager();
 description = "The hbase:quota table holds quota information about 
number" +
 " or size of requests in a given time frame.";
 } else if (tableName.equals(TableName.valueOf("hbase:rsgroup"))){
-description = "The hbase:rsgroup table holds information about 
regionserver groups";
+description = "The hbase:rsgroup table holds information about 
regionserver groups.";
+} else if (tableName.equals(TableName.valueOf("hbase:replication"))) {
+description = "The hbase:replication table tracks cross cluster 
replication through " +
+"WAL file offsets.";
 }
 
 <% description %>



hbase git commit: HBASE-16216 Clean up source code.

2016-07-12 Thread eclark
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 0eb150f9d -> 6cadc3828


HBASE-16216 Clean up source code.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6cadc382
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6cadc382
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6cadc382

Branch: refs/heads/HBASE-14850
Commit: 6cadc3828b714c31380df8f274c9a7fd6916a475
Parents: 0eb150f
Author: Elliott Clark 
Authored: Tue Jul 12 12:21:11 2016 -0700
Committer: Elliott Clark 
Committed: Tue Jul 12 13:01:37 2016 -0700

--
 hbase-native-client/core/cell-test.cc |  8 +++---
 hbase-native-client/core/cell.cc  | 40 +++---
 hbase-native-client/core/cell.h   |  4 +--
 hbase-native-client/if/HBase.proto|  5 
 hbase-native-client/test-util/test-util.h |  2 +-
 hbase-native-client/third-party/BUCK  |  6 ++--
 hbase-native-client/utils/user-util.h |  2 +-
 7 files changed, 21 insertions(+), 46 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6cadc382/hbase-native-client/core/cell-test.cc
--
diff --git a/hbase-native-client/core/cell-test.cc 
b/hbase-native-client/core/cell-test.cc
index 49f567b..1d5c0eb 100644
--- a/hbase-native-client/core/cell-test.cc
+++ b/hbase-native-client/core/cell-test.cc
@@ -19,9 +19,9 @@
 
 #include "core/cell.h"
 
-#include 
-#include 
 #include 
+#include 
+#include 
 
 using namespace hbase;
 TEST(CellTest, CellFailureTest) {
@@ -74,8 +74,8 @@ TEST(CellTest, MultipleCellsTest) {
 row += std::to_string(i);
 value += std::to_string(i);
 CellType cell_type = CellType::PUT;
-const Cell *cell = new Cell(row, family, column, timestamp, value,
-cell_type);
+const Cell *cell =
+new Cell(row, family, column, timestamp, value, cell_type);
 cells.push_back(cell);
   }
   int i = 0;

http://git-wip-us.apache.org/repos/asf/hbase/blob/6cadc382/hbase-native-client/core/cell.cc
--
diff --git a/hbase-native-client/core/cell.cc b/hbase-native-client/core/cell.cc
index f214479..5129bc9 100644
--- a/hbase-native-client/core/cell.cc
+++ b/hbase-native-client/core/cell.cc
@@ -25,13 +25,8 @@ namespace hbase {
 Cell::Cell(const std::string &row, const std::string &family,
const std::string &qualifier, const long ×tamp,
const std::string &value, const hbase::CellType &cell_type)
-: row_(row),
-  family_(family),
-  qualifier_(qualifier),
-  timestamp_(timestamp),
-  cell_type_(cell_type),
-  value_(value),
-  sequence_id_(0) {
+: row_(row), family_(family), qualifier_(qualifier), timestamp_(timestamp),
+  cell_type_(cell_type), value_(value), sequence_id_(0) {
 
   if (0 == row.size())
 throw std::runtime_error("Row size should be greater than 0");
@@ -43,35 +38,20 @@ Cell::Cell(const std::string &row, const std::string 
&family,
 throw std::runtime_error("Timestamp should be greater than 0");
 }
 
-Cell::~Cell() {
-}
+Cell::~Cell() {}
 
-const std::string &Cell::Row() const {
-  return row_;
-}
+const std::string &Cell::Row() const { return row_; }
 
-const std::string &Cell::Family() const {
-  return family_;
-}
+const std::string &Cell::Family() const { return family_; }
 
-const std::string &Cell::Qualifier() const {
-  return qualifier_;
-}
+const std::string &Cell::Qualifier() const { return qualifier_; }
 
-unsigned long Cell::Timestamp() const {
-  return timestamp_;
-}
+unsigned long Cell::Timestamp() const { return timestamp_; }
 
-const std::string &Cell::Value() const {
-  return value_;
-}
+const std::string &Cell::Value() const { return value_; }
 
-hbase::CellType Cell::Type() const {
-  return cell_type_;
-}
+hbase::CellType Cell::Type() const { return cell_type_; }
 
-long Cell::SequenceId() const {
-  return sequence_id_;
-}
+long Cell::SequenceId() const { return sequence_id_; }
 
 } /* namespace hbase */

http://git-wip-us.apache.org/repos/asf/hbase/blob/6cadc382/hbase-native-client/core/cell.h
--
diff --git a/hbase-native-client/core/cell.h b/hbase-native-client/core/cell.h
index 16ed280..2b15ad6 100644
--- a/hbase-native-client/core/cell.h
+++ b/hbase-native-client/core/cell.h
@@ -34,7 +34,7 @@ enum CellType {
 };
 
 class Cell {
- public:
+public:
   Cell(const std::string &row, const std::string &family,
const std::string &qualifier, const long ×tamp,
const std::string &value, const hbase::CellType &cell_type);
@@ -47,7 +47,7 @@ class Cell {
   CellType Type() const;
   long SequenceId() const;
 
- private:
+private:
   std::string row_;
   std::

hbase git commit: HBASE-16019 1.2.2RC2 passed; start 1.2.3-SNAPSHOT.

2016-07-12 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 9b5f19eae -> 244a6ad7d


HBASE-16019 1.2.2RC2 passed; start 1.2.3-SNAPSHOT.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/244a6ad7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/244a6ad7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/244a6ad7

Branch: refs/heads/branch-1.2
Commit: 244a6ad7df8e03bd10187e03b67d33d0ff381190
Parents: 9b5f19e
Author: Sean Busbey 
Authored: Tue Jul 12 14:15:21 2016 -0500
Committer: Sean Busbey 
Committed: Tue Jul 12 14:15:21 2016 -0500

--
 hbase-annotations/pom.xml| 2 +-
 hbase-assembly/pom.xml   | 2 +-
 hbase-checkstyle/pom.xml | 4 ++--
 hbase-client/pom.xml | 2 +-
 hbase-common/pom.xml | 2 +-
 hbase-examples/pom.xml   | 2 +-
 hbase-external-blockcache/pom.xml| 2 +-
 hbase-hadoop-compat/pom.xml  | 2 +-
 hbase-hadoop2-compat/pom.xml | 2 +-
 hbase-it/pom.xml | 2 +-
 hbase-prefix-tree/pom.xml| 2 +-
 hbase-procedure/pom.xml  | 2 +-
 hbase-protocol/pom.xml   | 2 +-
 hbase-resource-bundle/pom.xml| 2 +-
 hbase-rest/pom.xml   | 2 +-
 hbase-server/pom.xml | 2 +-
 hbase-shaded/hbase-shaded-client/pom.xml | 2 +-
 hbase-shaded/hbase-shaded-server/pom.xml | 2 +-
 hbase-shaded/pom.xml | 2 +-
 hbase-shell/pom.xml  | 2 +-
 hbase-testing-util/pom.xml   | 2 +-
 hbase-thrift/pom.xml | 2 +-
 pom.xml  | 2 +-
 23 files changed, 24 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/244a6ad7/hbase-annotations/pom.xml
--
diff --git a/hbase-annotations/pom.xml b/hbase-annotations/pom.xml
index a9cac56..2c0dd4c 100644
--- a/hbase-annotations/pom.xml
+++ b/hbase-annotations/pom.xml
@@ -23,7 +23,7 @@
   
 hbase
 org.apache.hbase
-1.2.2
+1.2.3-SNAPSHOT
 ..
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/244a6ad7/hbase-assembly/pom.xml
--
diff --git a/hbase-assembly/pom.xml b/hbase-assembly/pom.xml
index a278689..ce6adec 100644
--- a/hbase-assembly/pom.xml
+++ b/hbase-assembly/pom.xml
@@ -23,7 +23,7 @@
   
 hbase
 org.apache.hbase
-1.2.2
+1.2.3-SNAPSHOT
 ..
   
   hbase-assembly

http://git-wip-us.apache.org/repos/asf/hbase/blob/244a6ad7/hbase-checkstyle/pom.xml
--
diff --git a/hbase-checkstyle/pom.xml b/hbase-checkstyle/pom.xml
index daa3f22..61ea19b 100644
--- a/hbase-checkstyle/pom.xml
+++ b/hbase-checkstyle/pom.xml
@@ -24,14 +24,14 @@
 4.0.0
 org.apache.hbase
 hbase-checkstyle
-1.2.2
+1.2.3-SNAPSHOT
 Apache HBase - Checkstyle
 Module to hold Checkstyle properties for HBase.
 
   
 hbase
 org.apache.hbase
-1.2.2
+1.2.3-SNAPSHOT
 ..
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/244a6ad7/hbase-client/pom.xml
--
diff --git a/hbase-client/pom.xml b/hbase-client/pom.xml
index f9f4ac4..1e8019f 100644
--- a/hbase-client/pom.xml
+++ b/hbase-client/pom.xml
@@ -24,7 +24,7 @@
   
 hbase
 org.apache.hbase
-1.2.2
+1.2.3-SNAPSHOT
 ..
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/244a6ad7/hbase-common/pom.xml
--
diff --git a/hbase-common/pom.xml b/hbase-common/pom.xml
index bf4cc64..12b05dd 100644
--- a/hbase-common/pom.xml
+++ b/hbase-common/pom.xml
@@ -23,7 +23,7 @@
   
 hbase
 org.apache.hbase
-1.2.2
+1.2.3-SNAPSHOT
 ..
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/244a6ad7/hbase-examples/pom.xml
--
diff --git a/hbase-examples/pom.xml b/hbase-examples/pom.xml
index c36c9ca..8cd218c 100644
--- a/hbase-examples/pom.xml
+++ b/hbase-examples/pom.xml
@@ -23,7 +23,7 @@
   
 hbase
 org.apache.hbase
-1.2.2
+1.2.3-SNAPSHOT
 ..
   
   hbase-examples

http://git-wip-us.apache.org/repos/asf/hbase/blob/244a6ad7/hbase-external-blockcache/pom.xml
--
diff --git a/hbase-external-blockcache/pom.xml 
b/hbase-external-blockcache/pom.xml
index 3d27e56..89f7f16 100644
--- a/hbase-external-blockcache/pom.xml
+++ b/hbase-external-blockcache/pom.xml
@@ -25,7 +25,7 @@
   
 hbase
 org.apache.hbase
-1.2.2
+1.2.3-SNAPSHOT
 .

svn commit: r14371 - in /dev/hbase: hbase-1.2.2RC0/ hbase-1.2.2RC1/ hbase-1.2.2RC2/

2016-07-12 Thread busbey
Author: busbey
Date: Tue Jul 12 19:18:33 2016
New Revision: 14371

Log:
removing RCs for 1.2.2

Removed:
dev/hbase/hbase-1.2.2RC0/
dev/hbase/hbase-1.2.2RC1/
dev/hbase/hbase-1.2.2RC2/



svn commit: r14370 - /release/hbase/HEADER.html

2016-07-12 Thread busbey
Author: busbey
Date: Tue Jul 12 19:00:48 2016
New Revision: 14370

Log:
Missed reference to stable line in header.

Modified:
release/hbase/HEADER.html

Modified: release/hbase/HEADER.html
==
--- release/hbase/HEADER.html (original)
+++ release/hbase/HEADER.html Tue Jul 12 19:00:48 2016
@@ -5,8 +5,8 @@ href="http://www.apache.org/dyn/closer.c
 mirror site, not from www.apache.org.
 
 We suggest downloading the current stable release.
-The 1.1.x series is the current stable release line, it supercedes 1.0.x, 
0.98.x and 0.94.x
-(the 1.0.x, 0.98.x and 0.94.x lines are still seeing a monthly cadence of bug 
fix releases for
+The 1.2.x series is the current stable release line, it supercedes 1.1.x, 
1.0.x, 0.98.x and 0.94.x
+(the 1.1.x, 1.0.x, 0.98.x and 0.94.x lines are still seeing a monthly cadence 
of bug fix releases for
 those who are not easily able to update).  Note that 0.96 was EOL'd
 September 1st, 2014.
 For older versions, check the http://archive.apache.org/dist/hbase/";>apache archive.




svn commit: r14369 - in /release/hbase: ./ 1.2.1/ 1.2.2/

2016-07-12 Thread busbey
Author: busbey
Date: Tue Jul 12 18:46:12 2016
New Revision: 14369

Log:
Release HBase 1.2.2

vote: https://s.apache.org/hbase-1.2.2-rc2-vote

results: https://s.apache.org/hbase-1.2.2-vote-passes

discussion of stable pointer: https://s.apache.org/hbase-stable-to-1.2-releases


Added:
release/hbase/1.2.2/
release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz   (with props)
release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.asc
release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.md5
release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.mds
release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.sha
release/hbase/1.2.2/hbase-1.2.2-src.tar.gz   (with props)
release/hbase/1.2.2/hbase-1.2.2-src.tar.gz.asc
release/hbase/1.2.2/hbase-1.2.2-src.tar.gz.md5
release/hbase/1.2.2/hbase-1.2.2-src.tar.gz.mds
release/hbase/1.2.2/hbase-1.2.2-src.tar.gz.sha
Removed:
release/hbase/1.2.1/
Modified:
release/hbase/stable

Added: release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz
==
Binary file - no diff available.

Propchange: release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz
--
svn:mime-type = application/octet-stream

Added: release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.asc
==
--- release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.asc (added)
+++ release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.asc Tue Jul 12 18:46:12 2016
@@ -0,0 +1,17 @@
+-BEGIN PGP SIGNATURE-
+Version: GnuPG v1
+
+iQIcBAABCgAGBQJXdv71AAoJEOZeEdQNgNt89SMP/jyTlr9Bqov1RPB3nNIESZh3
+33jEgnLGP80Cj0koDcU0z1m6TeEKic3omOl4EZp9WPTpMPzyaz7JnaNm9UwiQtjm
+nZ+0EQuvAuTs83uEDbTHGGVwg5v84KTIaBtMbqYyFJ5FHQk9zENmEFqT54HdKsOb
+RMYYIDxYN4/j3WOpjclJG51glJ415+NY8NAZhpqM22DiKslf23m6TOClS5aS2Ix9
+7vlfI5jsnXdLaZJIdPu3QXnUzGfR6r5uM6x7TUDCNP6pPZY5CvvE7gSU1471XeoB
+yMzaU2Kj3b8g9HTKnmxfgrlh8KVlw6dCXa1kR65l8xk4UM85ihud84Qa2ry8lmjK
+E0HkDXrU1UZQaD31yp6UucgvbYbN9ESpv4Mp38tD8YoNnLV4NjgxrVz8dZ9cCKG+
+QxUzL2GTtpvqy+3Vlx3qhi2Axx8u/WTBL9WHaNfYsHiC+7a0KNdN/gcwr/hgqJ+C
+IVXhQ/Uf0BbQSJCQQUDIV8TOKJehZGg1+PdCmbmMcV7G6D4uklANMvdB48gojSun
+DuodeafiN1Vy3jVMhd9qAXuHh9nDjCfYvuFuJog4k9D6+hxu4pbQvkD5ZDkNTd+8
+2Nx/bep3VATg919GaLiJjM+P01LpW3W5aZG6nb1pCuvys+hLV1siYksVSYUWObS2
+1N89xaApLyWs25r55gq/
+=ZCg4
+-END PGP SIGNATURE-

Added: release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.md5
==
--- release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.md5 (added)
+++ release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.md5 Tue Jul 12 18:46:12 2016
@@ -0,0 +1 @@
+hbase-1.2.2-bin.tar.gz: B3 AA B7 7A 54 BF B0 AA  22 6E AC 48 52 14 A8 40

Added: release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.mds
==
--- release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.mds (added)
+++ release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.mds Tue Jul 12 18:46:12 2016
@@ -0,0 +1,17 @@
+hbase-1.2.2-bin.tar.gz:MD5 = B3 AA B7 7A 54 BF B0 AA  22 6E AC 48 52 14 A8
+ 40
+hbase-1.2.2-bin.tar.gz:   SHA1 = C36A 1614 2DFA C058 B6E0  476A 95F8 ECA2 702A
+ A8D2
+hbase-1.2.2-bin.tar.gz: RMD160 = 6506 E85B F64F E2D8 5084  ED05 C0B0 D24D 8426
+ 365B
+hbase-1.2.2-bin.tar.gz: SHA224 = 7A8B3A0B 2D943A26 8B57F13A 52A94960 3B120B16
+ B3A878E2 98FF4457
+hbase-1.2.2-bin.tar.gz: SHA256 = 8C9CC9A1 9E3F4A31 37509513 B5C1B9E1 B5A2283E
+ D261D44A F9AF1C02 C5453C20
+hbase-1.2.2-bin.tar.gz: SHA384 = 94EB1BB2 841658CF B33102E4 7C31819E EF403A51
+ B50B23BD 4DA03B68 F27A776C AF947B2D BF249A1A
+ A78531FB CB47A9A5
+hbase-1.2.2-bin.tar.gz: SHA512 = 386C7F7A BB937BEC 90E91C88 F21C5539 BBEF6253
+ 09514148 118371D7 63F1C8B4 58D84AB2 68D701EF
+ 5D480929 341D57F3 3A5A15C6 5C62C417 CC21E897
+ AEE97334

Added: release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.sha
==
--- release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.sha (added)
+++ release/hbase/1.2.2/hbase-1.2.2-bin.tar.gz.sha Tue Jul 12 18:46:12 2016
@@ -0,0 +1,3 @@
+hbase-1.2.2-bin.tar.gz: 386C7F7A BB937BEC 90E91C88 F21C5539 BBEF6253 09514148
+118371D7 63F1C8B4 58D84AB2 68D701EF 5D480929 341D57F3
+3A5A15C6 5C62C417 CC21E897 AEE97334

Added: release/hbase/1.2.2/hbase-1.2.2-src.tar.gz
==
Binary file - no diff available.

Propchange: release/hbase/1.2.2/hbase-1.2.2-src.tar.gz
--
svn:mime-type = application/octet-stream

Added: release/

[1/2] hbase git commit: HBASE-16092 Procedure v2 - complete child procedure support

2016-07-12 Thread mbertozzi
Repository: hbase
Updated Branches:
  refs/heads/branch-1 36a48a3cf -> 176598860
  refs/heads/master 2650711e9 -> 8cfaa0e72


HBASE-16092 Procedure v2 - complete child procedure support


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8cfaa0e7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8cfaa0e7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8cfaa0e7

Branch: refs/heads/master
Commit: 8cfaa0e72101176f00fa333e6cf6cbea7b2aca44
Parents: 2650711
Author: Matteo Bertozzi 
Authored: Tue Jul 12 10:23:02 2016 -0700
Committer: Matteo Bertozzi 
Committed: Tue Jul 12 10:23:02 2016 -0700

--
 .../hadoop/hbase/procedure2/Procedure.java  |   7 +-
 .../hbase/procedure2/ProcedureExecutor.java |  71 +--
 .../hbase/procedure2/RootProcedureState.java|  56 --
 .../procedure2/store/NoopProcedureStore.java|   5 +
 .../hbase/procedure2/store/ProcedureStore.java  |  10 +-
 .../procedure2/store/ProcedureStoreTracker.java |   9 +
 .../store/wal/ProcedureWALFormat.java   |  18 +-
 .../store/wal/ProcedureWALFormatReader.java |  63 +-
 .../procedure2/store/wal/WALProcedureStore.java |  33 +++-
 .../procedure2/ProcedureTestingUtility.java |  54 +-
 .../hbase/procedure2/TestChildProcedures.java   | 191 +++
 .../store/wal/TestStressWALProcedureStore.java  |   8 +-
 .../store/wal/TestWALProcedureStore.java|  48 -
 .../protobuf/generated/ProcedureProtos.java | 187 --
 .../src/main/protobuf/Procedure.proto   |   1 +
 15 files changed, 679 insertions(+), 82 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8cfaa0e7/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index ee61841..b401871 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -589,6 +589,11 @@ public abstract class Procedure implements 
Comparable {
 return --childrenLatch == 0;
   }
 
+  @InterfaceAudience.Private
+  protected synchronized boolean hasChildren() {
+return childrenLatch > 0;
+  }
+
   /**
* Called by the RootProcedureState on procedure execution.
* Each procedure store its stack-index positions.
@@ -606,7 +611,7 @@ public abstract class Procedure implements 
Comparable {
 
   @InterfaceAudience.Private
   protected synchronized boolean removeStackIndex() {
-if (stackIndexes.length > 1) {
+if (stackIndexes != null && stackIndexes.length > 1) {
   stackIndexes = Arrays.copyOf(stackIndexes, stackIndexes.length - 1);
   return false;
 } else {

http://git-wip-us.apache.org/repos/asf/hbase/blob/8cfaa0e7/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index 9d71f65..198623d 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -18,16 +18,18 @@
 
 package org.apache.hadoop.hbase.procedure2;
 
+import com.google.common.base.Preconditions;
+
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.HashSet;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
@@ -55,8 +57,6 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.NonceKey;
 import org.apache.hadoop.hbase.util.Pair;
 
-import com.google.common.base.Preconditions;
-
 /**
  * Thread Pool that executes the submitted procedures.
  * The executor has a ProcedureStore associated.
@@ -314,7 +314,7 @@ public class ProcedureExecutor {
   corruptedCount++;
 }
 if (abortOnCorruption && corruptedCount > 0) {
-  throw new IOException("found " + corruptedCount + " procedures on 
replay");
+  throw new IOException("found " + corruptedCount + " corrupted 
procedure(s) on replay");
 }

[2/2] hbase git commit: HBASE-16092 Procedure v2 - complete child procedure support

2016-07-12 Thread mbertozzi
HBASE-16092 Procedure v2 - complete child procedure support


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/17659886
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/17659886
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/17659886

Branch: refs/heads/branch-1
Commit: 1765988609741a1a341734ed7c3716157f5b111b
Parents: 36a48a3
Author: Matteo Bertozzi 
Authored: Tue Jul 12 10:23:02 2016 -0700
Committer: Matteo Bertozzi 
Committed: Tue Jul 12 11:05:36 2016 -0700

--
 .../hadoop/hbase/procedure2/Procedure.java  |   7 +-
 .../hbase/procedure2/ProcedureExecutor.java |  71 +--
 .../hbase/procedure2/RootProcedureState.java|  56 --
 .../procedure2/store/NoopProcedureStore.java|   5 +
 .../hbase/procedure2/store/ProcedureStore.java  |  10 +-
 .../procedure2/store/ProcedureStoreTracker.java |   9 +
 .../store/wal/ProcedureWALFormat.java   |  18 +-
 .../store/wal/ProcedureWALFormatReader.java |  63 +-
 .../procedure2/store/wal/WALProcedureStore.java |  33 +++-
 .../procedure2/ProcedureTestingUtility.java |  54 +-
 .../hbase/procedure2/TestChildProcedures.java   | 191 +++
 .../store/wal/TestStressWALProcedureStore.java  |   8 +-
 .../store/wal/TestWALProcedureStore.java|  48 -
 .../protobuf/generated/ProcedureProtos.java | 187 --
 .../src/main/protobuf/Procedure.proto   |   1 +
 15 files changed, 679 insertions(+), 82 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/17659886/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
index 2cc67be..1a1ea2f 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
@@ -565,6 +565,11 @@ public abstract class Procedure implements 
Comparable {
 return --childrenLatch == 0;
   }
 
+  @InterfaceAudience.Private
+  protected synchronized boolean hasChildren() {
+return childrenLatch > 0;
+  }
+
   /**
* Called by the RootProcedureState on procedure execution.
* Each procedure store its stack-index positions.
@@ -582,7 +587,7 @@ public abstract class Procedure implements 
Comparable {
 
   @InterfaceAudience.Private
   protected synchronized boolean removeStackIndex() {
-if (stackIndexes.length > 1) {
+if (stackIndexes != null && stackIndexes.length > 1) {
   stackIndexes = Arrays.copyOf(stackIndexes, stackIndexes.length - 1);
   return false;
 } else {

http://git-wip-us.apache.org/repos/asf/hbase/blob/17659886/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index 37bcb00..c4f06f1 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -18,16 +18,18 @@
 
 package org.apache.hadoop.hbase.procedure2;
 
+import com.google.common.base.Preconditions;
+
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.HashSet;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
@@ -55,8 +57,6 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.NonceKey;
 import org.apache.hadoop.hbase.util.Pair;
 
-import com.google.common.base.Preconditions;
-
 /**
  * Thread Pool that executes the submitted procedures.
  * The executor has a ProcedureStore associated.
@@ -314,7 +314,7 @@ public class ProcedureExecutor {
   corruptedCount++;
 }
 if (abortOnCorruption && corruptedCount > 0) {
-  throw new IOException("found " + corruptedCount + " procedures on 
replay");
+  throw new IOException("found " + corruptedCount + " corrupted 
procedure(s) on replay");
 }
   }
 });
@@ -388,10 +388,10 @@ public class ProcedureExecutor {
 continue;
   }
 
-  if (proc.hasP

[16/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
index d3973b4..43287e3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionCoprocessorEnvironment.html
@@ -152,11 +152,11 @@
 
 
 private RegionCoprocessorEnvironment
-MultiRowMutationEndpoint.env 
+AggregateImplementation.env 
 
 
 private RegionCoprocessorEnvironment
-AggregateImplementation.env 
+MultiRowMutationEndpoint.env 
 
 
 private RegionCoprocessorEnvironment
@@ -1948,13 +1948,13 @@
 
 
 void
-VisibilityLabelService.init(RegionCoprocessorEnvironment e)
-System calls this after opening of regions.
-
+DefaultVisibilityLabelServiceImpl.init(RegionCoprocessorEnvironment e) 
 
 
 void
-DefaultVisibilityLabelServiceImpl.init(RegionCoprocessorEnvironment e) 
+VisibilityLabelService.init(RegionCoprocessorEnvironment e)
+System calls this after opening of regions.
+
 
 
 private void

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
index f76650a..a6609dd 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/RegionServerCoprocessorEnvironment.html
@@ -110,24 +110,17 @@
 
 
 ReplicationEndpoint
-BaseRegionServerObserver.postCreateReplicationEndPoint(ObserverContext ctx,
-  ReplicationEndpoint endpoint) 
-
-
-ReplicationEndpoint
 RegionServerObserver.postCreateReplicationEndPoint(ObserverContext ctx,
   ReplicationEndpoint endpoint)
 This will be called after the replication endpoint is 
instantiated.
 
 
-
-void
-BaseRegionServerObserver.postMerge(ObserverContext c,
-  Region regionA,
-  Region regionB,
-  Region mergedRegion) 
-
 
+ReplicationEndpoint
+BaseRegionServerObserver.postCreateReplicationEndPoint(ObserverContext ctx,
+  ReplicationEndpoint endpoint) 
+
+
 void
 RegionServerObserver.postMerge(ObserverContext c,
   Region regionA,
@@ -136,14 +129,14 @@
 called after the regions merge.
 
 
-
+
 void
-BaseRegionServerObserver.postMergeCommit(ObserverContext ctx,
-  Region regionA,
-  Region regionB,
-  Region mergedRegion) 
+BaseRegionServerObserver.postMerge(ObserverContext c,
+  Region regionA,
+  Region regionB,
+  Region mergedRegion) 
 
-
+
 void
 RegionServerObserver.postMergeCommit(ObserverContext ctx,
   Region regionA,
@@ -152,13 +145,14 @@
 This will be called after PONR step as part of regions 
merge transaction.
 
 
-
+
 void
-BaseRegionServerObserver.postReplicateLogEntries(ObserverContext ctx,
-  http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List entries,
-  CellScanner cells) 
+BaseRegionServerObserver.postMergeCommit(ObserverContext ctx,
+  Region regionA,
+  Region regionB,
+  Region mergedRegion) 
 
-
+
 void
 RegionServerObserver.postReplicateLogEntries(ObserverContext ctx,
   http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List entries,
@@ -166,13 +160,13 @@
 This will be cal

[51/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
Published site at 2650711e944244b3b87e6d6805b7716b216e8786.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/27849820
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/27849820
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/27849820

Branch: refs/heads/asf-site
Commit: 27849820790c53417eac1183e2e9a314596a90e1
Parents: e29c39f
Author: jenkins 
Authored: Tue Jul 12 14:58:55 2016 +
Committer: Sean Busbey 
Committed: Tue Jul 12 10:39:32 2016 -0500

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 12913 ++--
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/constant-values.html|28 +
 apidocs/deprecated-list.html|35 +-
 apidocs/index-all.html  |35 +-
 apidocs/org/apache/hadoop/hbase/HConstants.html |   374 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   | 4 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |   246 +-
 .../hadoop/hbase/class-use/ServerName.html  | 4 +-
 .../hadoop/hbase/class-use/TableName.html   | 4 +-
 .../hadoop/hbase/client/CompactionState.html| 4 +-
 .../apache/hadoop/hbase/client/Durability.html  | 4 +-
 .../hadoop/hbase/client/IsolationLevel.html | 4 +-
 .../hadoop/hbase/client/SnapshotType.html   | 4 +-
 .../hbase/client/class-use/Durability.html  |18 +-
 .../hadoop/hbase/client/class-use/Mutation.html | 8 +-
 .../hadoop/hbase/client/class-use/Result.html   |30 +-
 .../hadoop/hbase/client/class-use/Row.html  | 4 +-
 .../hadoop/hbase/client/class-use/Scan.html |12 +-
 .../hadoop/hbase/client/package-tree.html   | 6 +-
 .../hbase/filter/CompareFilter.CompareOp.html   | 4 +-
 .../filter/class-use/Filter.ReturnCode.html |62 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |42 +-
 .../hadoop/hbase/filter/package-tree.html   | 4 +-
 .../org/apache/hadoop/hbase/io/TimeRange.html   |   151 +-
 .../io/class-use/ImmutableBytesWritable.html|30 +-
 .../hadoop/hbase/io/class-use/TimeRange.html| 8 +-
 .../hbase/io/encoding/DataBlockEncoding.html| 4 +-
 .../mapreduce/class-use/TableRecordReader.html  | 4 +-
 .../apache/hadoop/hbase/quotas/QuotaType.html   | 4 +-
 .../hbase/quotas/ThrottlingException.Type.html  | 4 +-
 .../hadoop/hbase/quotas/package-tree.html   | 4 +-
 .../hadoop/hbase/regionserver/BloomType.html| 4 +-
 .../hadoop/hbase/util/class-use/Order.html  |42 +-
 .../util/class-use/PositionedByteRange.html |   386 +-
 apidocs/overview-tree.html  |20 +-
 .../org/apache/hadoop/hbase/HConstants.html |  1305 +-
 .../org/apache/hadoop/hbase/client/Query.html   | 4 +-
 .../org/apache/hadoop/hbase/io/TimeRange.html   |   397 +-
 book.html   |11 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 18438 +
 checkstyle.rss  |48 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/constant-values.html |37 +-
 devapidocs/deprecated-list.html |   421 +-
 devapidocs/index-all.html   |85 +-
 .../apache/hadoop/hbase/HConstants.Modify.html  |18 +-
 .../org/apache/hadoop/hbase/HConstants.html |   376 +-
 .../HealthChecker.HealthCheckerExitStatus.html  | 4 +-
 .../apache/hadoop/hbase/KeepDeletedCells.html   | 4 +-
 .../org/apache/hadoop/hbase/KeyValueUtil.html   | 2 +-
 .../hadoop/hbase/class-use/Abortable.html   |81 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html |  1017 +-
 .../hadoop/hbase/class-use/CellComparator.html  |   148 +-
 .../hadoop/hbase/class-use/CellScanner.html |96 +-
 .../hadoop/hbase/class-use/ClusterStatus.html   |20 +-
 .../hadoop/hbase/class-use/Coprocessor.html |12 +-
 .../hbase/class-use/CoprocessorEnvironment.html |44 +-
 .../hbase/class-use/HBaseIOException.html   | 8 +-
 .../hbase/class-use/HColumnDescriptor.html  |   314 +-
 .../hadoop/hbase/class-use/HRegionInfo.html |   462 +-
 .../hadoop/hbase/class-use/HRegionLocation.html |24 +-
 .../hbase/class-use/HTableDescriptor.html   |   406 +-
 .../InterProcessLock.MetadataHandler.html   | 8 +-
 .../apache/hadoop/hbase/class-use/KeyValue.html |22 +-
 .../hbase/class-use/Nam

[37/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index db1d0dc..065c5ed 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -198,6 +198,10 @@
  
 abortable
 - Variable in class org.apache.hadoop.hbase.regionserver.snapshot.RegionServerSnapshotManager.SnapshotSubprocedurePool
  
+abortable
 - Variable in class org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
+ 
+abortable
 - Variable in class org.apache.hadoop.hbase.replication.ReplicationEndpoint.Context
+ 
 abortable
 - Variable in class org.apache.hadoop.hbase.replication.ReplicationPeersZKImpl
  
 abortable
 - Variable in class org.apache.hadoop.hbase.replication.ReplicationStateZKBase
@@ -911,7 +915,7 @@
 
 Write an update
 
-add(Cell)
 - Method in class org.apache.hadoop.hbase.regionserver.MutableSegment
+add(Cell,
 boolean) - Method in class org.apache.hadoop.hbase.regionserver.MutableSegment
 
 Adds the given cell into the segment
 
@@ -7897,6 +7901,8 @@
 
 check(DataTransferProtos.DataTransferEncryptorMessageProto)
 - Method in class org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutputSaslHelper.SaslNegotiateHandler
  
+check(long,
 long) - Static method in class org.apache.hadoop.hbase.io.TimeRange
+ 
 check - 
Variable in class org.apache.hadoop.hbase.rest.RowResource
  
 CHECK_AUTHS_FOR_MUTATION
 - Static variable in class org.apache.hadoop.hbase.security.visibility.VisibilityConstants
@@ -11811,7 +11817,7 @@
  
 compare(long)
 - Method in class org.apache.hadoop.hbase.io.TimeRange
 
-Compare the timestamp to timerange
+Compare the timestamp to timerange.
 
 compare(CallRunner,
 CallRunner) - Method in class org.apache.hadoop.hbase.ipc.SimpleRpcScheduler.CallPriorityComparator
  
@@ -16647,10 +16653,14 @@
  
 createWriterInTmp(long,
 Compression.Algorithm, boolean, boolean, boolean, boolean) - Method 
in class org.apache.hadoop.hbase.regionserver.HStore
  
+createWriterInTmp(long,
 Compression.Algorithm, boolean, boolean, boolean, boolean, 
TimeRangeTracker) - Method in class 
org.apache.hadoop.hbase.regionserver.HStore
+ 
 createWriterInTmp(long,
 Compression.Algorithm, boolean, boolean, boolean) - Method in 
interface org.apache.hadoop.hbase.regionserver.Store
  
 createWriterInTmp(long,
 Compression.Algorithm, boolean, boolean, boolean, boolean) - Method 
in interface org.apache.hadoop.hbase.regionserver.Store
  
+createWriterInTmp(long,
 Compression.Algorithm, boolean, boolean, boolean, boolean, 
TimeRangeTracker) - Method in interface 
org.apache.hadoop.hbase.regionserver.Store
+ 
 createWriteTable(int)
 - Method in class org.apache.hadoop.hbase.tool.Canary.RegionMonitor
  
 createZooKeeperWatcher()
 - Method in class org.apache.hadoop.hbase.client.replication.ReplicationAdmin
@@ -18309,6 +18319,8 @@
  
 DEFAULT_MAX_SYNC_FAILURE_ROLL
 - Static variable in class org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore
  
+DEFAULT_MAX_TERMINATION_WAIT_MULTIPLIER
 - Static variable in class org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint
+ 
 DEFAULT_MAX_WAIT_TIME
 - Static variable in class org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils
 
 By default, wait 300 seconds for a snapshot to 
complete
@@ -26908,6 +26920,8 @@
  
 GET_SIZE_KEY
 - Static variable in interface org.apache.hadoop.hbase.regionserver.MetricsRegionServerSource
  
+getAbortable()
 - Method in class org.apache.hadoop.hbase.replication.ReplicationEndpoint.Context
+ 
 getAbortable()
 - Method in class org.apache.hadoop.hbase.replication.ReplicationQueuesArguments
  
 getAbortNode(ZKProcedureUtil,
 String) - Static method in class 
org.apache.hadoop.hbase.procedure.ZKProcedureUtil
@@ -28148,6 +28162,10 @@
 
 This method exists just to encapsulate how we serialize 
keys.
 
+getCellLength(Cell)
 - Method in class org.apache.hadoop.hbase.regionserver.Segment
+
+Get cell length after serialized in KeyValue
+
 getCellList(byte[])
 - Method in class org.apache.hadoop.hbase.client.Mutation
 
 Creates an empty list if one doesn't exist for the given 
column family
@@ -29305,6 +29323,8 @@
 
 Needed for features such as delayed calls.
 
+getCurrentChunk()
 - Method in class org.apache.hadoop.hbase.regionserver.HeapMemStoreLAB
+ 
 getCurrentCompactedKVs()
 - Method in class org.apache.hadoop.hbase.RegionLoad
  
 getCurrentCompactedKvs()
 - Method in class org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress
@@ -34051,6 +34071,8 @@
  
 getNextForFuzzyRule(boolean,
 byte[], int, int, byte[], byte[]) - Static method in class 
org.apache.hadoop.hbase.filter.FuzzyRowFilter
  
+getNextFreeOffset()
 - Method in class org.apache.hadoop.hbase.regionserver.HeapMemStoreLAB.Chunk
+ 
 getNextIndexedKey()
 - Method in class org.apache.hadoop.hbase.io.hfile.BlockWithScanInfo
  
 getNextIndexe

[42/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
index 5a2923e..8f77b6e 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
@@ -552,728 +552,733 @@
 544
 545  /**
 546   * Timestamp to use when we want to 
refer to the oldest cell.
-547   */
-548  public static final long 
OLDEST_TIMESTAMP = Long.MIN_VALUE;
-549
-550  /**
-551   * LATEST_TIMESTAMP in bytes form
-552   */
-553  public static final byte [] 
LATEST_TIMESTAMP_BYTES = {
-554// big-endian
-555(byte) (LATEST_TIMESTAMP >>> 
56),
-556(byte) (LATEST_TIMESTAMP >>> 
48),
-557(byte) (LATEST_TIMESTAMP >>> 
40),
-558(byte) (LATEST_TIMESTAMP >>> 
32),
-559(byte) (LATEST_TIMESTAMP >>> 
24),
-560(byte) (LATEST_TIMESTAMP >>> 
16),
-561(byte) (LATEST_TIMESTAMP >>> 
8),
-562(byte) LATEST_TIMESTAMP,
-563  };
-564
-565  /**
-566   * Define for 'return-all-versions'.
-567   */
-568  public static final int ALL_VERSIONS = 
Integer.MAX_VALUE;
+547   * Special! Used in fake Cells only. 
Should never be the timestamp on an actual Cell returned to
+548   * a client.
+549   * @deprecated Should not be public 
since hbase-1.3.0. For internal use only. Move internal to
+550   * Scanners flagged as special 
timestamp value never to be returned as timestamp on a Cell.
+551   */
+552  @Deprecated
+553  public static final long 
OLDEST_TIMESTAMP = Long.MIN_VALUE;
+554
+555  /**
+556   * LATEST_TIMESTAMP in bytes form
+557   */
+558  public static final byte [] 
LATEST_TIMESTAMP_BYTES = {
+559// big-endian
+560(byte) (LATEST_TIMESTAMP >>> 
56),
+561(byte) (LATEST_TIMESTAMP >>> 
48),
+562(byte) (LATEST_TIMESTAMP >>> 
40),
+563(byte) (LATEST_TIMESTAMP >>> 
32),
+564(byte) (LATEST_TIMESTAMP >>> 
24),
+565(byte) (LATEST_TIMESTAMP >>> 
16),
+566(byte) (LATEST_TIMESTAMP >>> 
8),
+567(byte) LATEST_TIMESTAMP,
+568  };
 569
 570  /**
-571   * Unlimited time-to-live.
+571   * Define for 'return-all-versions'.
 572   */
-573//  public static final int FOREVER = 
-1;
-574  public static final int FOREVER = 
Integer.MAX_VALUE;
-575
-576  /**
-577   * Seconds in a week
-578   */
-579  public static final int WEEK_IN_SECONDS 
= 7 * 24 * 3600;
+573  public static final int ALL_VERSIONS = 
Integer.MAX_VALUE;
+574
+575  /**
+576   * Unlimited time-to-live.
+577   */
+578//  public static final int FOREVER = 
-1;
+579  public static final int FOREVER = 
Integer.MAX_VALUE;
 580
 581  /**
-582   * Seconds in a day, hour and minute
+582   * Seconds in a week
 583   */
-584  public static final int DAY_IN_SECONDS 
= 24 * 60 * 60;
-585  public static final int HOUR_IN_SECONDS 
= 60 * 60;
-586  public static final int 
MINUTE_IN_SECONDS = 60;
-587
-588  //TODO: although the following are 
referenced widely to format strings for
-589  //  the shell. They really aren't a 
part of the public API. It would be
-590  //  nice if we could put them 
somewhere where they did not need to be
-591  //  public. They could have package 
visibility
-592  public static final String NAME = 
"NAME";
-593  public static final String VERSIONS = 
"VERSIONS";
-594  public static final String IN_MEMORY = 
"IN_MEMORY";
-595  public static final String METADATA = 
"METADATA";
-596  public static final String 
CONFIGURATION = "CONFIGURATION";
-597
-598  /**
-599   * Retrying we multiply 
hbase.client.pause setting by what we have in this array until we
-600   * run out of array items.  Retries 
beyond this use the last number in the array.  So, for
-601   * example, if hbase.client.pause is 1 
second, and maximum retries count
-602   * hbase.client.retries.number is 10, 
we will retry at the following intervals:
-603   * 1, 2, 3, 5, 10, 20, 40, 100, 100, 
100.
-604   * With 100ms, a back-off of 200 means 
20s
-605   */
-606  public static final int [] 
RETRY_BACKOFF = {1, 2, 3, 5, 10, 20, 40, 100, 100, 100, 100, 200, 200};
-607
-608  public static final String REGION_IMPL 
= "hbase.hregion.impl";
-609
-610  /** modifyTable op for replacing the 
table descriptor */
-611  @InterfaceAudience.Private
-612  public static enum Modify {
-613CLOSE_REGION,
-614TABLE_COMPACT,
-615TABLE_FLUSH,
-616TABLE_MAJOR_COMPACT,
-617TABLE_SET_HTD,
-618TABLE_SPLIT
-619  }
-620
-621  /**
-622   * Scope tag for locally scoped data.
-623   * This data will not be replicated.
-624   */
-625  public static final int 
REPLICATION_SCOPE_LOCAL = 0;
-626
-627  /**
-628   * Scope tag for globally scoped 
data.
-629   * This data will be replicated to all 
peers.
-630   */
-631  public static final int 
REPLICATION_SCOPE_GLOBAL = 1;
-632
-633  /**
-634   * Default cluster ID, cannot be used 
to identify a 

[12/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
index a24194b..335d5c4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDecodingContext.html
@@ -205,30 +205,30 @@
 
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-DataBlockEncoder.createSeeker(CellComparator comparator,
-HFileBlockDecodingContext decodingCtx)
-Create a HFileBlock seeker which find KeyValues within a 
block.
-
+PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
+HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
+FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
-HFileBlockDecodingContext decodingCtx) 
+DataBlockEncoder.createSeeker(CellComparator comparator,
+HFileBlockDecodingContext decodingCtx)
+Create a HFileBlock seeker which find KeyValues within a 
block.
+
 
 
 http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
index 6841665..d8e55a5 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/class-use/HFileBlockDefaultDecodingContext.html
@@ -109,35 +109,35 @@
 
 
 protected http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-CopyKeyDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+DiffKeyDeltaEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
   int allocateHeaderLength,
   int skipLastBytes,
   HFileBlockDefaultDecodingContext decodingCtx) 
 
 
-protected abstract http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-BufferedDataBlockEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+protected http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
+PrefixKeyDeltaEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
   int allocateHeaderLength,
   int skipLastBytes,
   HFileBlockDefaultDecodingContext decodingCtx) 
 
 
-protected http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in java.nio">ByteBuffer
-FastDiffDeltaEncoder.internalDecodeKeyValues(http://docs.oracle.com/javase/7/docs/api/java/io/DataInputStream.html?is-external=true";
 title="class or interface in java.io">DataInputStream source,
+protected abstract http://docs.oracle.com/ja

[40/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 807b442..efc9c02 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -283,7 +283,7 @@
 1783
 0
 0
-11573
+11578
 
 Files
 
@@ -398,3370 +398,3380 @@
 0
 27
 
+org/apache/hadoop/hbase/HConstants.java
+0
+0
+1
+
 org/apache/hadoop/hbase/HRegionInfo.java
 0
 0
 58
-
+
 org/apache/hadoop/hbase/HRegionLocation.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/HTableDescriptor.java
 0
 0
 46
-
+
 org/apache/hadoop/hbase/HealthChecker.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/JMXListener.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/JitterScheduledThreadPoolExecutorImpl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/KeyValue.java
 0
 0
 135
-
+
 org/apache/hadoop/hbase/KeyValueTestUtil.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/KeyValueUtil.java
 0
 0
 30
-
+
 org/apache/hadoop/hbase/LocalHBaseCluster.java
 0
 0
 23
-
+
 org/apache/hadoop/hbase/MetaMutationAnnotation.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/MetaTableAccessor.java
 0
 0
 112
-
+
 org/apache/hadoop/hbase/NamespaceDescriptor.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/NotAllMetaRegionsOnlineException.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ProcedureUtil.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/RegionLoad.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/RegionLocations.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/RegionStateListener.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/ScheduledChore.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/ServerLoad.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ServerName.java
 0
 0
 34
-
+
 org/apache/hadoop/hbase/SettableSequenceId.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/SettableTimestamp.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/SplitLogCounters.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/SplitLogTask.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/Streamable.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/TableDescriptors.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/TableInfoMissingException.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/TableName.java
 0
 0
 20
-
+
 org/apache/hadoop/hbase/TagType.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/ZKNamespaceManager.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/ZNodeClearer.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/backup/HFileArchiver.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/backup/example/LongTermArchivingHFileCleaner.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/backup/example/TableHFileArchiveTracker.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/backup/example/ZKTableArchiveClient.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/classification/tools/ExcludePrivateAnnotationsStandardDoclet.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/classification/tools/IncludePublicAnnotationsStandardDoclet.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/classification/tools/StabilityOptions.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/AbstractClientScanner.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/Action.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/Admin.java
 0
 0
 57
-
+
 org/apache/hadoop/hbase/client/Append.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/AsyncProcess.java
 0
 0
 18
-
+
 org/apache/hadoop/hbase/client/BufferedMutator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/BufferedMutatorImpl.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ClientAsyncPrefetchScanner.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ClientIdGenerator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClientScanner.java
 0
 0
 205
-
+
 org/apache/hadoop/hbase/client/ClientSimpleScanner.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ClientSmallReversedScanner.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/ClientSmallScanner.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/ClusterStatusListener.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/CompactType.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/ConnectionConfiguration.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/ConnectionImplementation.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/CoprocessorHConnection.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/DelayingRunner.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/Delete.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/FlushRegionCallable.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/Get.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/HBaseAdmin.java
 0
 0
 39
-
+
 org/apache/hadoop/hbase/client/HRegionLocator.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/HTable.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/client/HTableInterface.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/HTableMultiplexer.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/HTableWrapper.java

[26/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
index d9dd294..3f9d94d 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Server.html
@@ -166,13 +166,13 @@
 
 
 
-private Server
-ZKSplitLogManagerCoordination.server 
-
-
 protected Server
 ZkCoordinatedStateManager.server 
 
+
+private Server
+ZKSplitLogManagerCoordination.server 
+
 
 
 
@@ -184,11 +184,11 @@
 
 
 Server
-BaseCoordinatedStateManager.getServer() 
+ZkCoordinatedStateManager.getServer() 
 
 
 Server
-ZkCoordinatedStateManager.getServer() 
+BaseCoordinatedStateManager.getServer() 
 
 
 
@@ -201,11 +201,11 @@
 
 
 void
-BaseCoordinatedStateManager.initialize(Server server) 
+ZkCoordinatedStateManager.initialize(Server server) 
 
 
 void
-ZkCoordinatedStateManager.initialize(Server server) 
+BaseCoordinatedStateManager.initialize(Server server) 
 
 
 
@@ -333,15 +333,15 @@
 
 
 private Server
-CatalogJanitor.server 
+SplitLogManager.server 
 
 
 private Server
-RegionStateStore.server 
+CatalogJanitor.server 
 
 
 private Server
-SplitLogManager.server 
+RegionStateStore.server 
 
 
 
@@ -473,19 +473,19 @@
 
 
 private Server
-SplitTransactionImpl.server 
+HeapMemoryManager.server 
 
 
 private Server
-SplitTransactionImpl.DaughterOpener.server 
+LogRoller.server 
 
 
 private Server
-LogRoller.server 
+SplitTransactionImpl.server 
 
 
 private Server
-HeapMemoryManager.server 
+SplitTransactionImpl.DaughterOpener.server 
 
 
 
@@ -498,23 +498,23 @@
 
 
 Server
-RegionMergeTransaction.getServer()
-Get the Server running the transaction or rollback
-
+RegionMergeTransactionImpl.getServer() 
 
 
 Server
-RegionMergeTransactionImpl.getServer() 
+SplitTransaction.getServer()
+Get the Server running the transaction or rollback
+
 
 
 Server
-SplitTransactionImpl.getServer() 
+RegionMergeTransaction.getServer()
+Get the Server running the transaction or rollback
+
 
 
 Server
-SplitTransaction.getServer()
-Get the Server running the transaction or rollback
-
+SplitTransactionImpl.getServer() 
 
 
 
@@ -550,23 +550,9 @@
 
 
 Region
-RegionMergeTransaction.execute(Server server,
-  RegionServerServices services)
-Deprecated. 
-use #execute(Server, RegionServerServices, 
User)
-
-
-
-
-Region
 RegionMergeTransactionImpl.execute(Server server,
   RegionServerServices services) 
 
-
-PairOfSameType
-SplitTransactionImpl.execute(Server server,
-  RegionServerServices services) 
-
 
 PairOfSameType
 SplitTransaction.execute(Server server,
@@ -578,21 +564,21 @@
 
 
 Region
-RegionMergeTransaction.execute(Server server,
-  RegionServerServices services,
-  User user)
-Run the transaction.
+RegionMergeTransaction.execute(Server server,
+  RegionServerServices services)
+Deprecated. 
+use #execute(Server, RegionServerServices, 
User)
+
 
 
 
-Region
-RegionMergeTransactionImpl.execute(Server server,
-  RegionServerServices services,
-  User user) 
+PairOfSameType
+SplitTransactionImpl.execute(Server server,
+  RegionServerServices services) 
 
 
-PairOfSameType
-SplitTransactionImpl.execute(Server server,
+Region
+RegionMergeTransactionImpl.execute(Server server,
   RegionServerServices services,
   User user) 
 
@@ -605,6 +591,20 @@
 
 
 
+Region
+RegionMergeTransaction.execute(Server server,
+  RegionServerServices services,
+  User user)
+Run the transaction.
+
+
+
+PairOfSameType
+SplitTransactionImpl.execute(Server server,
+  RegionServerServices services,
+  User user) 
+
+
 void
 ReplicationService.initialize(Server rs,
 org.apache.hadoop.fs.FileSystem fs,
@@ -639,23 +639,9 @@
 
 
 boolean
-RegionMergeTransaction.rollback(Server server,
-RegionServerServices services)
-Deprecated. 
-use #rollback(Server, RegionServerServices, 
User)
-
-
-
-
-boolean
 RegionMergeTransactionImpl.rollback(Server server,
 RegionServerServices services) 
 
-
-boolean
-SplitTransactionImpl.rollback(Server server,
-RegionServerServices services) 
-
 
 boolean
 SplitTransaction.rollback(Server server,
@@ -667,21 +653,21 @@
 
 
 boolean
-RegionMergeTransaction.rollback(Server server,
-RegionServerServices services,
-User user)
-Roll back a failed transaction
+RegionMergeTransaction.rollback(Server server,
+RegionServerServices services)
+Deprecated. 
+use #rollback(Server, RegionServerServices, 
User)
+
 
 
 
 boolean
-RegionMergeTransactionImpl.rollback(Server server,
-RegionServerServices servic

[23/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
index 74fe4e6..65d17de 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotDisabledException.html
@@ -100,13 +100,13 @@
 
 
 void
-MasterServices.checkTableModifiable(TableName tableName)
-Check table is modifiable; i.e.
-
+HMaster.checkTableModifiable(TableName tableName) 
 
 
 void
-HMaster.checkTableModifiable(TableName tableName) 
+MasterServices.checkTableModifiable(TableName tableName)
+Check table is modifiable; i.e.
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
index 74ccea7..0698107 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableNotFoundException.html
@@ -163,13 +163,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-MasterServices.checkTableModifiable(TableName tableName)
-Check table is modifiable; i.e.
-
+HMaster.checkTableModifiable(TableName tableName) 
 
 
 void
-HMaster.checkTableModifiable(TableName tableName) 
+MasterServices.checkTableModifiable(TableName tableName)
+Check table is modifiable; i.e.
+
 
 
 
@@ -186,13 +186,13 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-DisableTableHandler
-DisableTableHandler.prepare() 
-
-
 EnableTableHandler
 EnableTableHandler.prepare() 
 
+
+DisableTableHandler
+DisableTableHandler.prepare() 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
index c9d1384..17a77fc 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Tag.html
@@ -767,17 +767,17 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-VisibilityLabelService.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String visExpression,
+DefaultVisibilityLabelServiceImpl.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String visExpression,
   
boolean withSerializationFormat,
-  boolean checkAuths)
-Creates tags corresponding to given visibility 
expression.
-
+  
boolean checkAuths) 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-DefaultVisibilityLabelServiceImpl.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String visExpression,
+VisibilityLabelService.createVisibilityExpTags(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String visExpression,
   
boolean withSerializationFormat,
-  
boolean checkAuths) 
+  boolean checkAuths)
+Creates tags corresponding to given visibility 
expression.
+
 
 
 static http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
@@ -823,6 +823,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 byte[]
+DefaultVisibilityLabelServiceImpl.encodeVisibilityForReplication(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List tags,
+http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in 
java.lang">Byte seriali

[07/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/master/class-use/TableLockManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/class-use/TableLockManager.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/TableLockManager.html
index 2cafb00..74cd6ef 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/TableLockManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/TableLockManager.html
@@ -141,19 +141,19 @@
 
 
 private TableLockManager
-ExpiredMobFileCleanerChore.tableLockManager 
+AssignmentManager.tableLockManager 
 
 
 private TableLockManager
-MasterMobCompactionThread.CompactionRunner.tableLockManager 
+ExpiredMobFileCleanerChore.tableLockManager 
 
 
 private TableLockManager
-MobCompactionChore.tableLockManager 
+MasterMobCompactionThread.CompactionRunner.tableLockManager 
 
 
 private TableLockManager
-AssignmentManager.tableLockManager 
+MobCompactionChore.tableLockManager 
 
 
 
@@ -239,11 +239,11 @@
 
 
 private TableLockManager
-DisableTableHandler.tableLockManager 
+EnableTableHandler.tableLockManager 
 
 
 private TableLockManager
-EnableTableHandler.tableLockManager 
+DisableTableHandler.tableLockManager 
 
 
 
@@ -413,13 +413,13 @@
 
 
 
-private TableLockManager
-HMobStore.tableLockManager 
-
-
 protected TableLockManager
 HRegionServer.tableLockManager 
 
+
+private TableLockManager
+HMobStore.tableLockManager 
+
 
 
 
@@ -431,11 +431,11 @@
 
 
 TableLockManager
-RegionServerServices.getTableLockManager() 
+HRegionServer.getTableLockManager() 
 
 
 TableLockManager
-HRegionServer.getTableLockManager() 
+RegionServerServices.getTableLockManager() 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html
index c7a5273..0819182 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/TableStateManager.html
@@ -96,7 +96,7 @@
 
 
 private TableStateManager
-RegionStates.tableStateManager 
+AssignmentManager.tableStateManager 
 
 
 private TableStateManager
@@ -104,7 +104,7 @@
 
 
 private TableStateManager
-AssignmentManager.tableStateManager 
+RegionStates.tableStateManager 
 
 
 
@@ -117,7 +117,7 @@
 
 
 TableStateManager
-MasterServices.getTableStateManager() 
+AssignmentManager.getTableStateManager() 
 
 
 TableStateManager
@@ -125,7 +125,7 @@
 
 
 TableStateManager
-AssignmentManager.getTableStateManager() 
+MasterServices.getTableStateManager() 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/NormalizationPlan.PlanType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/NormalizationPlan.PlanType.html
 
b/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/NormalizationPlan.PlanType.html
index f7da6ed..61960a0 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/NormalizationPlan.PlanType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/NormalizationPlan.PlanType.html
@@ -96,7 +96,7 @@
 
 
 NormalizationPlan.PlanType
-MergeNormalizationPlan.getType() 
+NormalizationPlan.getType() 
 
 
 NormalizationPlan.PlanType
@@ -104,11 +104,11 @@
 
 
 NormalizationPlan.PlanType
-EmptyNormalizationPlan.getType() 
+MergeNormalizationPlan.getType() 
 
 
 NormalizationPlan.PlanType
-NormalizationPlan.getType() 
+EmptyNormalizationPlan.getType() 
 
 
 static NormalizationPlan.PlanType
@@ -134,25 +134,25 @@ the order they are declared.
 
 
 long
-RegionNormalizer.getSkippedCount(NormalizationPlan.PlanType type) 
+SimpleRegionNormalizer.getSkippedCount(NormalizationPlan.PlanType type) 
 
 
 long
-SimpleRegionNormalizer.getSkippedCount(NormalizationPlan.PlanType type) 
+RegionNormalizer.getSkippedCount(NormalizationPlan.PlanType type) 
 
 
 void
+SimpleRegionNormalizer.planSkipped(HRegionInfo hri,
+  NormalizationPlan.PlanType type) 
+
+
+void
 RegionNormalizer.planSkipped(HRegionInfo hri,
   NormalizationPlan.PlanType type)
 Notification for the case where plan couldn't be executed 
due to constraint violation, such as
  namespace quota
 
 
-
-void
-SimpleRegionNormalizer.planSkipped(HRegionInfo hri,
-  NormalizationPlan.PlanType type) 
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/master/normalizer/class-use/NormalizationPlan.html

[36/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HConstants.html 
b/devapidocs/org/apache/hadoop/hbase/HConstants.html
index dcf9f6b..2edd6d9 100644
--- a/devapidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/org/apache/hadoop/hbase/HConstants.html
@@ -1354,7 +1354,10 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 static long
 OLDEST_TIMESTAMP
-Timestamp to use when we want to refer to the oldest 
cell.
+Deprecated. 
+Should not be public since hbase-1.3.0. For internal use 
only. Move internal to
+ Scanners flagged as special timestamp value never to be returned as timestamp 
on a Cell.
+
 
 
 
@@ -3253,8 +3256,13 @@ public static final byte[] 
 
 OLDEST_TIMESTAMP
-public static final long OLDEST_TIMESTAMP
-Timestamp to use when we want to refer to the oldest 
cell.
+http://docs.oracle.com/javase/7/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
+public static final long OLDEST_TIMESTAMP
+Deprecated. Should not 
be public since hbase-1.3.0. For internal use only. Move internal to
+ Scanners flagged as special timestamp value never to be returned as timestamp 
on a Cell.
+Timestamp to use when we want to refer to the oldest cell.
+ Special! Used in fake Cells only. Should never be the timestamp on an actual 
Cell returned to
+ a client.
 See Also:Constant
 Field Values
 
 
@@ -3264,7 +3272,7 @@ public static final byte[] 
 
 LATEST_TIMESTAMP_BYTES
-public static final byte[] LATEST_TIMESTAMP_BYTES
+public static final byte[] LATEST_TIMESTAMP_BYTES
 LATEST_TIMESTAMP in bytes form
 
 
@@ -3274,7 +3282,7 @@ public static final byte[] 
 
 ALL_VERSIONS
-public static final int ALL_VERSIONS
+public static final int ALL_VERSIONS
 Define for 'return-all-versions'.
 See Also:Constant
 Field Values
 
@@ -3285,7 +3293,7 @@ public static final byte[] 
 
 FOREVER
-public static final int FOREVER
+public static final int FOREVER
 Unlimited time-to-live.
 See Also:Constant
 Field Values
 
@@ -3296,7 +3304,7 @@ public static final byte[] 
 
 WEEK_IN_SECONDS
-public static final int WEEK_IN_SECONDS
+public static final int WEEK_IN_SECONDS
 Seconds in a week
 See Also:Constant
 Field Values
 
@@ -3307,7 +3315,7 @@ public static final byte[] 
 
 DAY_IN_SECONDS
-public static final int DAY_IN_SECONDS
+public static final int DAY_IN_SECONDS
 Seconds in a day, hour and minute
 See Also:Constant
 Field Values
 
@@ -3318,7 +3326,7 @@ public static final byte[] 
 
 HOUR_IN_SECONDS
-public static final int HOUR_IN_SECONDS
+public static final int HOUR_IN_SECONDS
 See Also:Constant
 Field Values
 
 
@@ -3328,7 +3336,7 @@ public static final byte[] 
 
 MINUTE_IN_SECONDS
-public static final int MINUTE_IN_SECONDS
+public static final int MINUTE_IN_SECONDS
 See Also:Constant
 Field Values
 
 
@@ -3338,7 +3346,7 @@ public static final byte[] 
 
 NAME
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String NAME
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String NAME
 See Also:Constant
 Field Values
 
 
@@ -3348,7 +3356,7 @@ public static final byte[] 
 
 VERSIONS
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String VERSIONS
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String VERSIONS
 See Also:Constant
 Field Values
 
 
@@ -3358,7 +3366,7 @@ public static final byte[] 
 
 IN_MEMORY
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String IN_MEMORY
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String IN_MEMORY
 See Also:Constant
 Field Values
 
 
@@ -3368,7 +3376,7 @@ public static final byte[] 
 
 METADATA
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String METADATA
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String METADATA
 See Also:Constant
 Field Values
 
 
@@ -3378,7 +3386,7 @@ public static final byte[] 
 
 CONFIGURATION
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CONFIGURATION
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=tru

[25/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/SplitLogTask.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/SplitLogTask.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/SplitLogTask.html
index 0855b61..73ded02 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/SplitLogTask.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/SplitLogTask.html
@@ -147,19 +147,19 @@
 
 
 void
-ZkSplitLogWorkerCoordination.endTask(SplitLogTask slt,
+SplitLogWorkerCoordination.endTask(SplitLogTask slt,
   http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicLong ctr,
-  SplitLogWorkerCoordination.SplitTaskDetails details)
-endTask() can fail and the only way to recover out of it is 
for the
- SplitLogManager to timeout the 
task node.
+  SplitLogWorkerCoordination.SplitTaskDetails splitTaskDetails)
+Notify coordination engine that splitting task has 
completed.
 
 
 
 void
-SplitLogWorkerCoordination.endTask(SplitLogTask slt,
+ZkSplitLogWorkerCoordination.endTask(SplitLogTask slt,
   http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicLong ctr,
-  SplitLogWorkerCoordination.SplitTaskDetails splitTaskDetails)
-Notify coordination engine that splitting task has 
completed.
+  SplitLogWorkerCoordination.SplitTaskDetails details)
+endTask() can fail and the only way to recover out of it is 
for the
+ SplitLogManager to timeout the 
task node.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
index 615907d..7be3309 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableDescriptors.html
@@ -118,11 +118,11 @@
 
 
 TableDescriptors
-MasterServices.getTableDescriptors() 
+HMaster.getTableDescriptors() 
 
 
 TableDescriptors
-HMaster.getTableDescriptors() 
+MasterServices.getTableDescriptors() 
 
 
 
@@ -215,13 +215,14 @@
 
 
 
-ReplicationEndpoint.Context(org.apache.hadoop.conf.Configuration conf,
+ReplicationEndpoint.Context(org.apache.hadoop.conf.Configuration conf,
   
org.apache.hadoop.fs.FileSystem fs,
   http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String peerId,
   http://docs.oracle.com/javase/7/docs/api/java/util/UUID.html?is-external=true";
 title="class or interface in java.util">UUID clusterId,
   ReplicationPeer replicationPeer,
   MetricsSource metrics,
-  TableDescriptors tableDescriptors) 
+  TableDescriptors tableDescriptors,
+  Abortable abortable) 
 
 
 



[44/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
--
diff --git 
a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html 
b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
index 2f3ad05..99a410e 100644
--- a/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
+++ b/apidocs/org/apache/hadoop/hbase/util/class-use/PositionedByteRange.html
@@ -116,105 +116,105 @@
 
 
 
-T
-DataType.decode(PositionedByteRange src)
-Read an instance of T from the buffer 
src.
-
+T
+FixedLengthWrapper.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Number.html?is-external=true";
 title="class or interface in java.lang">Number
-OrderedNumeric.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
+OrderedInt16.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
-RawLong.decode(PositionedByteRange src) 
+byte[]
+OrderedBlob.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
-RawShort.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
+OrderedInt64.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[]
-Struct.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Number.html?is-external=true";
 title="class or interface in java.lang">Number
+OrderedNumeric.decode(PositionedByteRange src) 
 
 
-T
-FixedLengthWrapper.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+OrderedString.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
-RawByte.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
+RawShort.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-RawString.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
+RawLong.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Byte.html?is-external=true";
 title="class or interface in java.lang">Byte
-OrderedInt8.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
+RawInteger.decode(PositionedByteRange src) 
 
 
-byte[]
-RawBytes.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
+RawDouble.decode(PositionedByteRange src) 
 
 
-T
-TerminatedWrapper.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+RawString.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-OrderedString.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html?is-external=true";
 title="class or interface in java.lang">Integer
+OrderedInt32.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
-OrderedInt64.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
+OrderedFloat64.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/javase/7/docs/api/java/lang/Short.html?is-external=true";
 title="class or interface in java.lang">Short
-OrderedInt16.decode(PositionedByteRange src) 
+byte[]
+RawBytes.decode(PositionedByteRange src) 
 
 
-byte[]
-OrderedBlobVar.decode(PositionedByteRange src) 
+http://docs.oracle.com/javase/7/docs/api/java/lang/Float.html?is-external=true";
 title="class or interface in java.lang">Float
+OrderedFloat32.decode(PositionedByteRange src) 
 
 
-http://docs.oracle.com/ja

[45/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 02c305c..98310da 100644
--- a/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/apidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -382,43 +382,43 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-DependentColumnFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+FirstKeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+PrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-InclusiveStopFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+TimestampsFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-SingleColumnValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+QualifierFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-QualifierFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+PageFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-KeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+SingleColumnValueExcludeFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-PrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-MultipleColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+SingleColumnValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-PageFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnCountGetFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-ColumnCountGetFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+MultipleColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
@@ -426,31

[32/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html
index 8be1722..b7b694c 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CoprocessorEnvironment.html
@@ -350,15 +350,11 @@
 
 
 void
-MultiRowMutationEndpoint.start(CoprocessorEnvironment env)
-Stores a reference to the coprocessor environment provided 
by the
- RegionCoprocessorHost 
from the region where this
- coprocessor is loaded.
-
+BaseRegionObserver.start(CoprocessorEnvironment e) 
 
 
 void
-BaseRegionObserver.start(CoprocessorEnvironment e) 
+BaseMasterAndRegionObserver.start(CoprocessorEnvironment ctx) 
 
 
 void
@@ -370,15 +366,19 @@
 
 
 void
-BaseRegionServerObserver.start(CoprocessorEnvironment env) 
+MultiRowMutationEndpoint.start(CoprocessorEnvironment env)
+Stores a reference to the coprocessor environment provided 
by the
+ RegionCoprocessorHost 
from the region where this
+ coprocessor is loaded.
+
 
 
 void
-BaseMasterAndRegionObserver.start(CoprocessorEnvironment ctx) 
+BaseMasterObserver.start(CoprocessorEnvironment ctx) 
 
 
 void
-BaseMasterObserver.start(CoprocessorEnvironment ctx) 
+BaseRegionServerObserver.start(CoprocessorEnvironment env) 
 
 
 void
@@ -394,11 +394,11 @@
 
 
 void
-MultiRowMutationEndpoint.stop(CoprocessorEnvironment env) 
+BaseRegionObserver.stop(CoprocessorEnvironment e) 
 
 
 void
-BaseRegionObserver.stop(CoprocessorEnvironment e) 
+BaseMasterAndRegionObserver.stop(CoprocessorEnvironment ctx) 
 
 
 void
@@ -406,15 +406,15 @@
 
 
 void
-BaseRegionServerObserver.stop(CoprocessorEnvironment env) 
+MultiRowMutationEndpoint.stop(CoprocessorEnvironment env) 
 
 
 void
-BaseMasterAndRegionObserver.stop(CoprocessorEnvironment ctx) 
+BaseMasterObserver.stop(CoprocessorEnvironment ctx) 
 
 
 void
-BaseMasterObserver.stop(CoprocessorEnvironment ctx) 
+BaseRegionServerObserver.stop(CoprocessorEnvironment env) 
 
 
 void
@@ -444,11 +444,11 @@
 
 
 void
-BulkDeleteEndpoint.start(CoprocessorEnvironment env) 
+ZooKeeperScanPolicyObserver.start(CoprocessorEnvironment e) 
 
 
 void
-ZooKeeperScanPolicyObserver.start(CoprocessorEnvironment e) 
+BulkDeleteEndpoint.start(CoprocessorEnvironment env) 
 
 
 void
@@ -456,11 +456,11 @@
 
 
 void
-BulkDeleteEndpoint.stop(CoprocessorEnvironment env) 
+ZooKeeperScanPolicyObserver.stop(CoprocessorEnvironment e) 
 
 
 void
-ZooKeeperScanPolicyObserver.stop(CoprocessorEnvironment e) 
+BulkDeleteEndpoint.stop(CoprocessorEnvironment env) 
 
 
 
@@ -589,19 +589,19 @@
 
 
 void
-SecureBulkLoadEndpoint.start(CoprocessorEnvironment env) 
+AccessController.start(CoprocessorEnvironment env) 
 
 
 void
-AccessController.start(CoprocessorEnvironment env) 
+SecureBulkLoadEndpoint.start(CoprocessorEnvironment env) 
 
 
 void
-SecureBulkLoadEndpoint.stop(CoprocessorEnvironment env) 
+AccessController.stop(CoprocessorEnvironment env) 
 
 
 void
-AccessController.stop(CoprocessorEnvironment env) 
+SecureBulkLoadEndpoint.stop(CoprocessorEnvironment env) 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
index e4c8960..fd60be4 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HBaseIOException.html
@@ -642,14 +642,14 @@
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-RegionNormalizer.computePlanForTable(TableName table)
-Computes next optimal normalization plan.
+SimpleRegionNormalizer.computePlanForTable(TableName table)
+Computes next most "urgent" normalization action on the 
table.
 
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-SimpleRegionNormalizer.computePlanForTable(TableName table)
-Computes next most "urgent" normalization action on the 
table.
+RegionNormalizer.computePlanForTable(TableName table)
+Computes next optimal normalization plan.
 
 
 



[35/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html 
b/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
index 97174a2..e386993 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/HealthChecker.HealthCheckerExitStatus.html
@@ -258,7 +258,7 @@ the order they are declared.
 
 
 values
-public static HealthChecker.HealthCheckerExitStatus[] values()
+public static HealthChecker.HealthCheckerExitStatus[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -275,7 +275,7 @@ for (HealthChecker.HealthCheckerExitStatus c : 
HealthChecker.HealthCheckerExitSt
 
 
 valueOf
-public static HealthChecker.HealthCheckerExitStatus valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static HealthChecker.HealthCheckerExitStatus valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html 
b/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
index 41fb732..a0339eb 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
@@ -249,7 +249,7 @@ the order they are declared.
 
 
 values
-public static KeepDeletedCells[] values()
+public static KeepDeletedCells[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -266,7 +266,7 @@ for (KeepDeletedCells c : KeepDeletedCells.values())
 
 
 valueOf
-public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html 
b/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html
index e665701..e24fc2f 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeyValueUtil.html
@@ -627,7 +627,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
int qlength)
 Create a KeyValue for the specified row, family and 
qualifier that would be
  larger than or equal to all other possible KeyValues that have the same
- row, family, qualifier. Used for reseeking.
+ row, family, qualifier. Used for reseeking. Should NEVER be returned to a 
client.
 Parameters:row - row 
keyroffset - row offsetrlength - 
row lengthfamily - family 
namefoffset - family offsetflength 
- family lengthqualifier - column 
qualifierqoffset - qualifier 
offsetqlength - qualifier length
 Returns:Last possible key on passed 
row, family, qualifier.
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
index 7a0cb69..7e2aaf7 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Abortable.html
@@ -286,11 +286,11 @@
 
 
 private Abortable
-SimpleRpcScheduler.abortable 
+RpcExecutor.abortable 
 
 
 private Abortable
-RpcExecutor.abortable 
+SimpleRpcScheduler.abortable 
 
 
 
@@ -531,24 +531,24 @@
 
 
 RpcScheduler
-FifoRpcSchedulerFactory.create(org.apache.hadoop.conf

[17/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
index 8b24449..6b89aff 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/ObserverContext.html
@@ -185,118 +185,118 @@
 
 
 void
-MasterObserver.postAbortProcedure(ObserverContext ctx)
-Called after a abortProcedure request has been 
processed.
-
+BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
-BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
-BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+MasterObserver.postAbortProcedure(ObserverContext ctx)
+Called after a abortProcedure request has been 
processed.
+
 
 
 void
-MasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
+BaseMasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+ (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
+ Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+
 
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContext ctx,
+MasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
 As of release 2.0.0, this will be removed in HBase 3.0.0
  (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+ Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
 
 
 
 
 void
-MasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily)
-Called after the new column family has been created.
-
+  HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+MasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily) 
+  HColumnDescriptor columnFamily)
+Called after the new column family has been created.
+
 
 
 void
-MasterObserver.postAddColumnHandler(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645). Use
- MasterObserver.postCompletedAddColumnFamilyAction(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnH

[52/52] hbase-site git commit: INFRA-10751 Empty commit

2016-07-12 Thread busbey
INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/50e4c45b
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/50e4c45b
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/50e4c45b

Branch: refs/heads/asf-site
Commit: 50e4c45bfd921f9380f342de6490d11bbb682334
Parents: 2784982
Author: Sean Busbey 
Authored: Tue Jul 12 10:40:02 2016 -0500
Committer: Sean Busbey 
Committed: Tue Jul 12 10:40:02 2016 -0500

--

--




[48/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/org/apache/hadoop/hbase/HConstants.html
index 8699e9c..33c41c4 100644
--- a/apidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/org/apache/hadoop/hbase/HConstants.html
@@ -1327,7 +1327,10 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 static long
 OLDEST_TIMESTAMP
-Timestamp to use when we want to refer to the oldest 
cell.
+Deprecated. 
+Should not be public since hbase-1.3.0. For internal use 
only. Move internal to
+ Scanners flagged as special timestamp value never to be returned as timestamp 
on a Cell.
+
 
 
 
@@ -3207,8 +3210,13 @@ public static final byte[] 
 
 OLDEST_TIMESTAMP
-public static final long OLDEST_TIMESTAMP
-Timestamp to use when we want to refer to the oldest 
cell.
+http://docs.oracle.com/javase/7/docs/api/java/lang/Deprecated.html?is-external=true";
 title="class or interface in java.lang">@Deprecated
+public static final long OLDEST_TIMESTAMP
+Deprecated. Should not 
be public since hbase-1.3.0. For internal use only. Move internal to
+ Scanners flagged as special timestamp value never to be returned as timestamp 
on a Cell.
+Timestamp to use when we want to refer to the oldest cell.
+ Special! Used in fake Cells only. Should never be the timestamp on an actual 
Cell returned to
+ a client.
 See Also:Constant
 Field Values
 
 
@@ -3218,7 +3226,7 @@ public static final byte[] 
 
 LATEST_TIMESTAMP_BYTES
-public static final byte[] LATEST_TIMESTAMP_BYTES
+public static final byte[] LATEST_TIMESTAMP_BYTES
 LATEST_TIMESTAMP in bytes form
 
 
@@ -3228,7 +3236,7 @@ public static final byte[] 
 
 ALL_VERSIONS
-public static final int ALL_VERSIONS
+public static final int ALL_VERSIONS
 Define for 'return-all-versions'.
 See Also:Constant
 Field Values
 
@@ -3239,7 +3247,7 @@ public static final byte[] 
 
 FOREVER
-public static final int FOREVER
+public static final int FOREVER
 Unlimited time-to-live.
 See Also:Constant
 Field Values
 
@@ -3250,7 +3258,7 @@ public static final byte[] 
 
 WEEK_IN_SECONDS
-public static final int WEEK_IN_SECONDS
+public static final int WEEK_IN_SECONDS
 Seconds in a week
 See Also:Constant
 Field Values
 
@@ -3261,7 +3269,7 @@ public static final byte[] 
 
 DAY_IN_SECONDS
-public static final int DAY_IN_SECONDS
+public static final int DAY_IN_SECONDS
 Seconds in a day, hour and minute
 See Also:Constant
 Field Values
 
@@ -3272,7 +3280,7 @@ public static final byte[] 
 
 HOUR_IN_SECONDS
-public static final int HOUR_IN_SECONDS
+public static final int HOUR_IN_SECONDS
 See Also:Constant
 Field Values
 
 
@@ -3282,7 +3290,7 @@ public static final byte[] 
 
 MINUTE_IN_SECONDS
-public static final int MINUTE_IN_SECONDS
+public static final int MINUTE_IN_SECONDS
 See Also:Constant
 Field Values
 
 
@@ -3292,7 +3300,7 @@ public static final byte[] 
 
 NAME
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String NAME
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String NAME
 See Also:Constant
 Field Values
 
 
@@ -3302,7 +3310,7 @@ public static final byte[] 
 
 VERSIONS
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String VERSIONS
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String VERSIONS
 See Also:Constant
 Field Values
 
 
@@ -3312,7 +3320,7 @@ public static final byte[] 
 
 IN_MEMORY
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String IN_MEMORY
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String IN_MEMORY
 See Also:Constant
 Field Values
 
 
@@ -3322,7 +3330,7 @@ public static final byte[] 
 
 METADATA
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String METADATA
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String METADATA
 See Also:Constant
 Field Values
 
 
@@ -3332,7 +3340,7 @@ public static final byte[] 
 
 CONFIGURATION
-public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CONFIGURATION
+public static final http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="cla

[38/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/deprecated-list.html
--
diff --git a/devapidocs/deprecated-list.html b/devapidocs/deprecated-list.html
index 6822681..19657e7 100644
--- a/devapidocs/deprecated-list.html
+++ b/devapidocs/deprecated-list.html
@@ -238,50 +238,56 @@
 org.apache.hadoop.hbase.http.HttpServer.Builder.name
 
 
-org.apache.hadoop.hbase.http.HttpServer.Builder.port
+org.apache.hadoop.hbase.HConstants.OLDEST_TIMESTAMP
+Should not be public since hbase-1.3.0. For internal use 
only. Move internal to
+ Scanners flagged as special timestamp value never to be returned as timestamp 
on a Cell.
+
 
 
-org.apache.hadoop.hbase.regionserver.KeyPrefixRegionSplitPolicy.PREFIX_LENGTH_KEY_DEPRECATED
+org.apache.hadoop.hbase.http.HttpServer.Builder.port
 
 
-org.apache.hadoop.hbase.KeyValue.RAW_COMPARATOR
+org.apache.hadoop.hbase.regionserver.KeyPrefixRegionSplitPolicy.PREFIX_LENGTH_KEY_DEPRECATED
 
 
-org.apache.hadoop.hbase.client.Scan.SCAN_ATTRIBUTES_METRICS_DATA
+org.apache.hadoop.hbase.KeyValue.RAW_COMPARATOR
 
 
+org.apache.hadoop.hbase.client.Scan.SCAN_ATTRIBUTES_METRICS_DATA
+
+
 org.apache.hadoop.hbase.client.Scan.SCAN_ATTRIBUTES_METRICS_ENABLE
 since 1.0.0. Use Scan.setScanMetricsEnabled(boolean)
 
 
-
+
 org.apache.hadoop.hbase.regionserver.wal.WALEdit.scopes
 Legacy
 
 
-
+
 org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationGlobalSourceSource.shippedKBsCounter
 
-
+
 org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSourceImpl.shippedKBsKey
 
-
+
 org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils.SNAPSHOT_TIMEOUT_MILLIS_DEFAULT
 Use SnapshotDescriptionUtils.DEFAULT_MAX_WAIT_TIME
 instead.
 
 
-
+
 org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils.SNAPSHOT_TIMEOUT_MILLIS_KEY
 Use SnapshotDescriptionUtils.MASTER_SNAPSHOT_TIMEOUT_MILLIS
 instead.
 
 
-
+
 org.apache.hadoop.hbase.replication.regionserver.MetricsReplicationSourceSource.SOURCE_SHIPPED_KBS
 
-
+
 org.apache.hadoop.hbase.mapreduce.SimpleTotalOrderPartitioner.START
 
-
+
 org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.tableZNode
 
 
@@ -369,13 +375,13 @@
 org.apache.hadoop.hbase.mapreduce.CellCreator.create(byte[],
 int, int, byte[], int, int, byte[], int, int, long, byte[], int, int, 
String)
 
 
-org.apache.hadoop.hbase.regionserver.FifoRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
-org.apache.hadoop.hbase.regionserver.RpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
-org.apache.hadoop.hbase.regionserver.SimpleRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
+org.apache.hadoop.hbase.regionserver.FifoRpcSchedulerFactory.create(Configuration,
 PriorityFunction)
 
 
 org.apache.hadoop.hbase.client.Admin.deleteColumn(TableName,
 byte[])
@@ -400,13 +406,13 @@
 org.apache.hadoop.hbase.KeyValueUtil.ensureKeyValues(List)
 
 
-org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.execute(Server,
 RegionServerServices)
-use #execute(Server, RegionServerServices, 
User)
+org.apache.hadoop.hbase.regionserver.SplitTransaction.execute(Server,
 RegionServerServices)
+use #execute(Server, RegionServerServices, User);  as of 
1.0.2, remove in 3.0
 
 
 
-org.apache.hadoop.hbase.regionserver.SplitTransaction.execute(Server,
 RegionServerServices)
-use #execute(Server, RegionServerServices, User);  as of 
1.0.2, remove in 3.0
+org.apache.hadoop.hbase.regionserver.RegionMergeTransaction.execute(Server,
 RegionServerServices)
+use #execute(Server, RegionServerServices, 
User)
 
 
 
@@ -418,15 +424,15 @@
 org.apache.hadoop.hbase.rest.client.RemoteHTable.exists(List)
 
 
-org.apache.hadoop.hbase.filter.Filter.filterRowKey(byte[],
 int, int)
+org.apache.hadoop.hbase.filter.FilterBase.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this will be removed in HBase 3.0.0.
- Instead use Filter.filterRowKey(Cell)
+ Instead use FilterBase.filterRowKey(Cell)
 
 
 
-org.apache.hadoop.hbase.filter.FilterBase.filterRowKey(byte[],
 int, int)
+org.apache.hadoop.hbase.filter.Filter.filterRowKey(byte[],
 int, int)
 As of release 2.0.0, this will be removed in HBase 3.0.0.
- Instead use FilterBase.filterRowKey(Cell)
+ Instead use Filter.filterRowKey(Cell)
 
 
 
@@ -523,10 +529,10 @@
 
 
 
-org.apache.hadoop.hbase.http.HttpServer.getPort()
+org.apache.hadoop.hbase.http.InfoServer.getPort()
 
 
-org.apache.hadoop.hbase.http.InfoServer.getPort()
+org.apache.hadoop.hbase.http.HttpServer.getPort()
 
 
 org.apache.hadoop.hbase.CellUtil.getQualifierBufferShallowCopy(Cell)
@@ -582,13 +588,13 @@
 
 
 
-org.apache.hadoop.hbase.util.Bytes.getSize()
-use Bytes.getLength()
 instead
+org.apache.hadoop.hbase.io.ImmutableBytes

[43/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/overview-tree.html
--
diff --git a/apidocs/overview-tree.html b/apidocs/overview-tree.html
index 09aebe0..1325409 100644
--- a/apidocs/overview-tree.html
+++ b/apidocs/overview-tree.html
@@ -844,23 +844,23 @@
 org.apache.hadoop.hbase.ProcedureState
 org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
 org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
-org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
-org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
 org.apache.hadoop.hbase.filter.FilterList.Operator
+org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
 org.apache.hadoop.hbase.filter.Filter.ReturnCode
-org.apache.hadoop.hbase.client.CompactType
+org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
+org.apache.hadoop.hbase.regionserver.BloomType
+org.apache.hadoop.hbase.quotas.QuotaScope
+org.apache.hadoop.hbase.quotas.ThrottleType
+org.apache.hadoop.hbase.quotas.ThrottlingException.Type
+org.apache.hadoop.hbase.quotas.QuotaType
 org.apache.hadoop.hbase.client.IsolationLevel
-org.apache.hadoop.hbase.client.SnapshotType
+org.apache.hadoop.hbase.client.CompactType
+org.apache.hadoop.hbase.client.Consistency
 org.apache.hadoop.hbase.client.MasterSwitchType
 org.apache.hadoop.hbase.client.CompactionState
+org.apache.hadoop.hbase.client.SnapshotType
 org.apache.hadoop.hbase.client.Durability
-org.apache.hadoop.hbase.client.Consistency
 org.apache.hadoop.hbase.client.security.SecurityCapability
-org.apache.hadoop.hbase.quotas.ThrottlingException.Type
-org.apache.hadoop.hbase.quotas.ThrottleType
-org.apache.hadoop.hbase.quotas.QuotaType
-org.apache.hadoop.hbase.quotas.QuotaScope
-org.apache.hadoop.hbase.regionserver.BloomType
 
 
 



[39/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 05e93d2..a1c7e71 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 ©2007 - 2016 The Apache Software Foundation
 
-  File: 1784,
- Errors: 11571,
+  File: 1783,
+ Errors: 11578,
  Warnings: 0,
  Infos: 0
   
@@ -4647,7 +4647,7 @@ under the License.
   0
 
 
-  14
+  18
 
   
   
@@ -5179,7 +5179,7 @@ under the License.
   0
 
 
-  49
+  48
 
   
   
@@ -8488,20 +8488,6 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.master.cleaner.ReplicationZKLockCleanerChore.java";>org/apache/hadoop/hbase/master/cleaner/ReplicationZKLockCleanerChore.java
-
-
-  0
-
-
-  0
-
-
-  0
-
-  
-  
-
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.snapshot.SnapshotManifestV2.java";>org/apache/hadoop/hbase/snapshot/SnapshotManifestV2.java
 
 
@@ -10387,7 +10373,7 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
@@ -10765,7 +10751,7 @@ under the License.
   0
 
 
-  81
+  80
 
   
   
@@ -16995,7 +16981,7 @@ under the License.
   0
 
 
-  1
+  7
 
   
   
@@ -19002,7 +18988,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.procedure.Subprocedure.java";>org/apache/hadoop/hbase/procedure/Subprocedure.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.java";>org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java
 
 
   0
@@ -19011,12 +18997,12 @@ under the License.
   0
 
 
-  9
+  0
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.protobuf.generated.RegionServerStatusProtos.java";>org/apache/hadoop/hbase/protobuf/generated/RegionServerStatusProtos.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.procedure.Subprocedure.java";>org/apache/hadoop/hbase/procedure/Subprocedure.java
 
 
   0
@@ -19025,7 +19011,7 @@ under the License.
   0
 
 
-  0
+  9
 
   
   
@@ -19067,7 +19053,7 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
@@ -19375,7 +19361,7 @@ under the License.
   0
 
 
-  21
+  22
 
   
   
@@ -20271,7 +20257,7 @@ under the License.
   0
 
 
-  9
+  3
 
   
   
@@ -20327,7 +20313,7 @@ under the License.
   0
 
 
-  5
+  6
 
   
   
@@ -20369,7 +20355,7 @@ under the License.
   0
 
 
-  6
+  5
 
   
   
@@ -21335,7 +21321,7 @@ under the License.
   0
 
 
-  11
+  13
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/278

[46/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/org/apache/hadoop/hbase/client/CompactionState.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/CompactionState.html 
b/apidocs/org/apache/hadoop/hbase/client/CompactionState.html
index 60433b2..e634660 100644
--- a/apidocs/org/apache/hadoop/hbase/client/CompactionState.html
+++ b/apidocs/org/apache/hadoop/hbase/client/CompactionState.html
@@ -245,7 +245,7 @@ the order they are declared.
 
 
 values
-public static CompactionState[] values()
+public static CompactionState[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -262,7 +262,7 @@ for (CompactionState c : CompactionState.values())
 
 
 valueOf
-public static CompactionState valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static CompactionState valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/org/apache/hadoop/hbase/client/Durability.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Durability.html 
b/apidocs/org/apache/hadoop/hbase/client/Durability.html
index af1f718..0f065d5 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Durability.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Durability.html
@@ -280,7 +280,7 @@ the order they are declared.
 
 
 values
-public static Durability[] values()
+public static Durability[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -297,7 +297,7 @@ for (Durability c : Durability.values())
 
 
 valueOf
-public static Durability valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static Durability valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html 
b/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
index 7dc6ab3..7cc0a93 100644
--- a/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
+++ b/apidocs/org/apache/hadoop/hbase/client/IsolationLevel.html
@@ -243,7 +243,7 @@ the order they are declared.
 
 
 values
-public static IsolationLevel[] values()
+public static IsolationLevel[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -260,7 +260,7 @@ for (IsolationLevel c : IsolationLevel.values())
 
 
 valueOf
-public static IsolationLevel valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static IsolationLevel valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html 
b/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html
index 81ef17a..bcea5a9 100644
--- a/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html
+++ b/apidocs/org/apache/hadoop/hbase/client/SnapshotType.html
@@ -233,7 +233,7 @@ the order they are declared.
 
 
 values
-public static SnapshotType[] values()
+public static SnapshotType[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the

[14/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html 
b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
index 435b828..1eef354 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/class-use/Filter.html
@@ -158,11 +158,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 Filter
-Scan.getFilter() 
+Query.getFilter() 
 
 
 Filter
-Query.getFilter() 
+Scan.getFilter() 
 
 
 
@@ -174,19 +174,19 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
+Query
+Query.setFilter(Filter filter)
+Apply the specified server-side filter when performing the 
Query.
+
+
+
 Scan
 Scan.setFilter(Filter filter) 
 
-
+
 Get
 Get.setFilter(Filter filter) 
 
-
-Query
-Query.setFilter(Filter filter)
-Apply the specified server-side filter when performing the 
Query.
-
-
 
 
 
@@ -413,16 +413,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-private Filter
-WhileMatchFilter.filter 
+(package private) Filter
+FilterWrapper.filter 
 
 
 private Filter
-SkipFilter.filter 
+WhileMatchFilter.filter 
 
 
-(package private) Filter
-FilterWrapper.filter 
+private Filter
+SkipFilter.filter 
 
 
 private Filter
@@ -452,65 +452,65 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 static Filter
-DependentColumnFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnPaginationFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-PrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+InclusiveStopFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-FamilyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+FilterBase.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments)
+Given the filter's arguments it constructs the filter
+
 
 
 static Filter
-InclusiveStopFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+KeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-FirstKeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ValueFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-TimestampsFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+PrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-KeyOnlyFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+SingleColumnValueExcludeFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
 
 
 static Filter
-MultipleColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/javase/7/docs/api/java/util/ArrayList.html?is-external=true";
 title="class or interface in 
java.util">ArrayList filterArguments) 
+ColumnPrefixFilter.createFilterFromArguments(http://docs.oracle.com/ja

[31/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
index 1b79305..cecaaab 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HColumnDescriptor.html
@@ -561,193 +561,181 @@ service.
 
 
 void
-MasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
+BaseMasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+ (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
+ Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+
 
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContext ctx,
+MasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
 As of release 2.0.0, this will be removed in HBase 3.0.0
  (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+ Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
 
 
 
 
 void
-MasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily)
-Called after the new column family has been created.
-
+  HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+MasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily) 
+  HColumnDescriptor columnFamily)
+Called after the new column family has been created.
+
 
 
 void
-MasterObserver.postAddColumnHandler(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645). Use
- MasterObserver.postCompletedAddColumnFamilyAction(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
+BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+ (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645). Use
+ BaseMasterObserver.postCompletedAddColumnFamilyAction(ObserverContext,
 TableName, HColumnDescriptor).
+
 
 
 
 void
-BaseMasterObserver.postAddColumnHandler(ObserverContext ctx,
+MasterObserver.postAddColumnHandler(ObserverContext ctx,

[20/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
index e235b30..9dd8373 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Row.html
@@ -251,11 +251,11 @@
 
 
 int
-RegionCoprocessorServiceExec.compareTo(Row o) 
+RowMutations.compareTo(Row i) 
 
 
 int
-Get.compareTo(Row other) 
+RegionCoprocessorServiceExec.compareTo(Row o) 
 
 
 int
@@ -263,11 +263,11 @@
 
 
 int
-Increment.compareTo(Row i) 
+Get.compareTo(Row other) 
 
 
 int
-RowMutations.compareTo(Row i) 
+Increment.compareTo(Row i) 
 
 
 private boolean
@@ -373,14 +373,14 @@
 
 
 void
-Table.batch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
+HTable.batch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
   http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results)
 Method that does a batch call on Deletes, Gets, Puts, 
Increments and Appends.
 
 
 
 void
-HTable.batch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
+Table.batch(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
   http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results)
 Method that does a batch call on Deletes, Gets, Puts, 
Increments and Appends.
 
@@ -392,7 +392,7 @@
 
 
  void
-Table.batchCallback(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
+HTable.batchCallback(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
   http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results,
   Batch.Callback callback)
 Same as Table.batch(List,
 Object[]), but with a callback.
@@ -400,7 +400,7 @@
 
 
  void
-HTable.batchCallback(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
+Table.batchCallback(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List actions,
   http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object[] results,
   Batch.Callback callback)
 Same as Table.batch(List,
 Object[]), but with a callback.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html
index 5d12e66..91c48e1 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/RowMutations.html
@@ -119,23 +119,23 @@ service.
 
 
 boolean
-Table.checkAndMutate(byte[] row,
+HTable.checkAndMutate(byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
 byte[] value,
-RowMutations mutation)
+RowMutations rm)
 Atomically checks if a row/family/qualifier value matches 
the expected value.
 
 
 
 boolean
-HTable.checkAndMutate(byte[] row,
+Table.checkAndMutate(byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
 byte[] value,
-RowMutations rm)
+RowMutations mutation)
 Atomically checks if a row/family/qualifier value matches 
the expected value.
 
 
@@ -150,13 +150,13 @@ service.
 
 
 void
-Table.mutateRow(RowMutations rm)
+HTable.mutateRow(RowMutations rm)
 Performs multiple mutation

[34/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 82361c0..70f8a21 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -1259,32 +1259,32 @@ service.
 
 
 
-boolean
-KeyValue.KVComparator.matchingRowColumn(Cell left,
+static boolean
+CellUtil.matchingRowColumn(Cell left,
   Cell right)
-Deprecated. 
 Compares the row and column of two keyvalues for 
equality
 
 
 
-static boolean
-CellUtil.matchingRowColumn(Cell left,
+boolean
+KeyValue.KVComparator.matchingRowColumn(Cell left,
   Cell right)
+Deprecated. 
 Compares the row and column of two keyvalues for 
equality
 
 
 
-boolean
-KeyValue.KVComparator.matchingRows(Cell left,
+static boolean
+CellUtil.matchingRows(Cell left,
 Cell right)
-Deprecated. 
 Compares the row of two keyvalues for equality
 
 
 
-static boolean
-CellUtil.matchingRows(Cell left,
+boolean
+KeyValue.KVComparator.matchingRows(Cell left,
 Cell right)
+Deprecated. 
 Compares the row of two keyvalues for equality
 
 
@@ -1644,23 +1644,23 @@ service.
 
 
 
+Increment
+Increment.add(Cell cell)
+Add the specified KeyValue to this operation.
+
+
+
 Put
 Put.add(Cell kv)
 Add the specified KeyValue to this Put operation.
 
 
-
+
 Append
 Append.add(Cell cell)
 Add column and value to this Append operation.
 
 
-
-Increment
-Increment.add(Cell cell)
-Add the specified KeyValue to this operation.
-
-
 
 Delete
 Delete.addDeleteMarker(Cell kv)
@@ -1753,23 +1753,23 @@ service.
 Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
 
-Put
-Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
-
 Mutation
 Mutation.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map)
 Method for setting the put's familyMap
 
 
-
-Append
-Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
 
 Increment
 Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
+
+Put
+Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
+
+Append
+Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
 
 
 
@@ -1801,23 +1801,23 @@ service.
 
 
 
+http://docs.oracle.com/javase/7/docs/api/java/math/BigDecimal.html?is-external=true";
 title="class or interface in java.math">BigDecimal
+BigDecimalColumnInterpreter.getValue(byte[] colFamily,
+byte[] colQualifier,
+Cell kv) 
+
+
 http://docs.oracle.com/javase/7/docs/api/java/lang/Double.html?is-external=true";
 title="class or interface in java.lang">Double
 DoubleColumnInterpreter.getValue(byte[] colFamily,
 byte[] colQualifier,
 Cell c) 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long
 LongColumnInterpreter.getValue(byte[] colFamily,
 byte[] colQualifier,
 Cell kv) 
 
-
-http://docs.o

[33/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
index e146e63..3085c8e 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/CellComparator.html
@@ -246,30 +246,30 @@
 
 
 DataBlockEncoder.EncodedSeeker
-CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
+DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-DataBlockEncoder.createSeeker(CellComparator comparator,
-HFileBlockDecodingContext decodingCtx)
-Create a HFileBlock seeker which find KeyValues within a 
block.
-
+PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
+HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
+CopyKeyDataBlockEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-PrefixKeyDeltaEncoder.createSeeker(CellComparator comparator,
+FastDiffDeltaEncoder.createSeeker(CellComparator comparator,
 HFileBlockDecodingContext decodingCtx) 
 
 
 DataBlockEncoder.EncodedSeeker
-DiffKeyDeltaEncoder.createSeeker(CellComparator comparator,
-HFileBlockDecodingContext decodingCtx) 
+DataBlockEncoder.createSeeker(CellComparator comparator,
+HFileBlockDecodingContext decodingCtx)
+Create a HFileBlock seeker which find KeyValues within a 
block.
+
 
 
 
@@ -299,30 +299,30 @@
 
 
 protected CellComparator
-HFileWriterImpl.comparator
-Key comparator.
-
-
-
-protected CellComparator
 HFile.WriterFactory.comparator 
 
-
+
 private CellComparator
-HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
-Needed doing lookup on blocks.
+HFileReaderImpl.comparator
+Key comparator
 
 
-
+
 protected CellComparator
 CompoundBloomFilterBase.comparator
 Comparator used to compare Bloom filter keys
 
 
+
+protected CellComparator
+HFileWriterImpl.comparator
+Key comparator.
+
+
 
 private CellComparator
-HFileReaderImpl.comparator
-Key comparator
+HFileBlockIndex.CellBasedKeyBlockIndexReader.comparator
+Needed doing lookup on blocks.
 
 
 
@@ -500,44 +500,44 @@
 
 
 private CellComparator
-StoreFileWriter.Builder.comparator 
+HStore.comparator 
 
 
 private CellComparator
-AbstractMemStore.comparator 
+Segment.comparator 
 
 
-protected CellComparator
-StripeMultiFileWriter.comparator 
+private CellComparator
+AbstractMemStore.comparator 
 
 
 private CellComparator
-Segment.comparator 
+StoreFileWriter.Builder.comparator 
 
 
-private CellComparator
-ScanInfo.comparator 
+protected CellComparator
+StripeMultiFileWriter.comparator 
 
 
 private CellComparator
-HStore.comparator 
+ScanInfo.comparator 
 
 
 protected CellComparator
-HRegion.RegionScannerImpl.comparator 
+StripeStoreFlusher.StripeFlushRequest.comparator 
 
 
 protected CellComparator
-StripeStoreFlusher.StripeFlushRequest.comparator 
+HRegion.RegionScannerImpl.comparator 
 
 
-protected CellComparator
-KeyValueHeap.KVScannerComparator.kvComparator 
-
-
 private CellComparator
 DefaultStoreFileManager.kvComparator 
 
+
+protected CellComparator
+KeyValueHeap.KVScannerComparator.kvComparator 
+
 
 private CellComparator
 ScanQueryMatcher.rowComparator
@@ -565,37 +565,37 @@
 
 
 CellComparator
-StoreFileReader.getComparator() 
+HStore.getComparator() 
 
 
 protected CellComparator
-AbstractMemStore.getComparator() 
-
-
-protected CellComparator
 Segment.getComparator()
 Returns the Cell comparator used by this segment
 
 
+
+CellComparator
+KeyValueHeap.KVScannerComparator.getComparator() 
+
 
 CellComparator
-ScanInfo.getComparator() 
+StoreFileReader.getComparator() 
 
 
-CellComparator
-Store.getComparator() 
+protected CellComparator
+AbstractMemStore.getComparator() 
 
 
 CellComparator
-KeyValueHeap.KVScannerComparator.getComparator() 
+ScanInfo.getComparator() 
 
 
-(package private) CellComparator
-StoreFileScanner.getComparator() 
+CellComparator
+Store.getComparator() 
 
 
-CellComparator
-HStore.getComparator() 
+(package private) CellComparator
+StoreFileScanner.getComparator() 
 
 
 
@@ -629,12 +629,6 @@
 
 
 
-protected void
-DefaultStoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
-Store store,
-CellComparator kvComparator) 
-
-
 protected abstract void
 StoreEngine.createComponents(org.apache.hadoop.conf.Configuration conf,
 Store store,
@@ -642,12 +636,18 @@
 Create the StoreEngi

[30/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
index b4e49aa..cd1abff 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionInfo.html
@@ -867,7 +867,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-ScannerCallableWithReplicas.getHRegionInfo() 
+AbstractRegionServerCallable.getHRegionInfo() 
 
 
 HRegionInfo
@@ -875,7 +875,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HRegionInfo
-AbstractRegionServerCallable.getHRegionInfo() 
+ScannerCallableWithReplicas.getHRegionInfo() 
 
 
 private HRegionInfo
@@ -1113,152 +1113,152 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-MasterObserver.postAssign(ObserverContext ctx,
-HRegionInfo regionInfo)
-Called after the region assignment has been requested.
-
-
-
-void
 BaseMasterAndRegionObserver.postAssign(ObserverContext ctx,
 HRegionInfo regionInfo) 
 
-
+
 void
 BaseMasterObserver.postAssign(ObserverContext ctx,
 HRegionInfo regionInfo) 
 
-
+
 void
-MasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
-HTableDescriptor desc,
-HRegionInfo[] regions)
-Called after the createTable operation has been 
requested.
+MasterObserver.postAssign(ObserverContext ctx,
+HRegionInfo regionInfo)
+Called after the region assignment has been requested.
 
 
-
+
 void
 BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContext ctx,
 HTableDescriptor desc,
 HRegionInfo[] regions) 
 
-
+
 void
 BaseMasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
 HTableDescriptor desc,
 HRegionInfo[] regions) 
 
-
+
 void
-MasterObserver.postCreateTable(ObserverContext ctx,
-  HTableDescriptor desc,
-  HRegionInfo[] regions)
+MasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
+HTableDescriptor desc,
+HRegionInfo[] regions)
 Called after the createTable operation has been 
requested.
 
 
-
+
 void
 BaseMasterAndRegionObserver.postCreateTable(ObserverContext ctx,
   HTableDescriptor desc,
   HRegionInfo[] regions) 
 
-
+
 void
 BaseMasterObserver.postCreateTable(ObserverContext ctx,
   HTableDescriptor desc,
   HRegionInfo[] regions) 
 
+
+void
+MasterObserver.postCreateTable(ObserverContext ctx,
+  HTableDescriptor desc,
+  HRegionInfo[] regions)
+Called after the createTable operation has been 
requested.
+
+
 
 void
-MasterObserver.postCreateTableHandler(ObserverContext ctx,
+BaseMasterAndRegionObserver.postCreateTableHandler(ObserverContext ctx,
 HTableDescriptor desc,
 HRegionInfo[] regions)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
-   (https://issues.apache.org/jira/browse/HBASE-15575";>HBASE-15575).
-   Use MasterObserver.postCompletedCreateTableAction(ObserverContext,
 HTableDescriptor, HRegionInfo[])
-
+Deprecated. 
 
 
 
 void
-BaseMasterAndRegionObserver.postCreateTableHandler(ObserverContext ctx,
+BaseMasterObserver.postCreateTableHandler(ObserverContext ctx,
 HTableDescriptor desc,
 HRegionInfo[] regions)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+   (https://issues.apache.org/jira/browse/HBASE-15575";>HBASE-15575).
+   Use BaseMasterObserver.postCompleted

[50/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index ccb2ddb..cb3135f 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,16 +5,16 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
 /Producer (Apache HBase Team)
-/CreationDate (D:20160707144306+00'00')
-/ModDate (D:20160707144306+00'00')
+/CreationDate (D:20160712144528+00'00')
+/ModDate (D:20160712144528+00'00')
 >>
 endobj
 2 0 obj
 << /Type /Catalog
 /Pages 3 0 R
 /Names 25 0 R
-/Outlines 4007 0 R
-/PageLabels 4212 0 R
+/Outlines 4008 0 R
+/PageLabels 4213 0 R
 /PageMode /UseOutlines
 /ViewerPreferences [/FitWindow]
 >>
@@ -22,7 +22,7 @@ endobj
 3 0 obj
 << /Type /Pages
 /Count 662
-/Kids [7 0 R 13 0 R 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 39 0 R 43 0 R 47 0 R 58 
0 R 62 0 R 64 0 R 66 0 R 68 0 R 75 0 R 78 0 R 80 0 R 85 0 R 88 0 R 90 0 R 92 0 
R 101 0 R 107 0 R 112 0 R 114 0 R 131 0 R 137 0 R 144 0 R 146 0 R 150 0 R 153 0 
R 164 0 R 172 0 R 188 0 R 192 0 R 196 0 R 198 0 R 202 0 R 208 0 R 210 0 R 212 0 
R 214 0 R 216 0 R 219 0 R 225 0 R 227 0 R 229 0 R 231 0 R 233 0 R 235 0 R 237 0 
R 239 0 R 243 0 R 247 0 R 249 0 R 251 0 R 253 0 R 255 0 R 257 0 R 259 0 R 261 0 
R 267 0 R 270 0 R 272 0 R 274 0 R 276 0 R 281 0 R 286 0 R 291 0 R 294 0 R 298 0 
R 313 0 R 324 0 R 331 0 R 341 0 R 352 0 R 357 0 R 359 0 R 361 0 R 366 0 R 380 0 
R 385 0 R 388 0 R 393 0 R 397 0 R 408 0 R 420 0 R 435 0 R 441 0 R 443 0 R 445 0 
R 452 0 R 463 0 R 474 0 R 485 0 R 488 0 R 491 0 R 495 0 R 499 0 R 502 0 R 505 0 
R 507 0 R 510 0 R 514 0 R 516 0 R 520 0 R 524 0 R 530 0 R 534 0 R 536 0 R 542 0 
R 544 0 R 548 0 R 556 0 R 558 0 R 561 0 R 564 0 R 567 0 R 570 0 R 585 0 R 592 0 
R 599 0 R 610 0 R 617 0 R 626 0 R 634 0
  R 637 0 R 641 0 R 644 0 R 657 0 R 665 0 R 671 0 R 676 0 R 680 0 R 682 0 R 696 
0 R 708 0 R 714 0 R 720 0 R 723 0 R 731 0 R 739 0 R 744 0 R 749 0 R 754 0 R 756 
0 R 758 0 R 760 0 R 768 0 R 777 0 R 781 0 R 788 0 R 796 0 R 802 0 R 806 0 R 813 
0 R 817 0 R 822 0 R 830 0 R 832 0 R 836 0 R 847 0 R 852 0 R 854 0 R 857 0 R 861 
0 R 867 0 R 870 0 R 882 0 R 886 0 R 891 0 R 899 0 R 904 0 R 908 0 R 912 0 R 914 
0 R 917 0 R 919 0 R 923 0 R 925 0 R 928 0 R 933 0 R 937 0 R 942 0 R 946 0 R 949 
0 R 951 0 R 958 0 R 962 0 R 967 0 R 980 0 R 984 0 R 988 0 R 993 0 R 995 0 R 
1004 0 R 1007 0 R 1012 0 R 1015 0 R 1024 0 R 1027 0 R 1033 0 R 1040 0 R 1043 0 
R 1045 0 R 1054 0 R 1056 0 R 1058 0 R 1061 0 R 1063 0 R 1065 0 R 1067 0 R 1069 
0 R 1071 0 R 1074 0 R 1077 0 R 1082 0 R 1085 0 R 1087 0 R 1089 0 R 1091 0 R 
1096 0 R 1105 0 R 1108 0 R 1110 0 R 1112 0 R 1117 0 R 1119 0 R 1122 0 R 1124 0 
R 1126 0 R 1128 0 R 1131 0 R 1137 0 R 1142 0 R 1149 0 R 1154 0 R 1168 0 R 1179 
0 R 1184 0 R 1196 0 R 1205 0 R 1221 0 R 1225 0 R 1
 235 0 R 1248 0 R 1251 0 R 1263 0 R 1272 0 R 1280 0 R 1284 0 R 1293 0 R 1298 0 
R 1302 0 R 1308 0 R 1314 0 R 1321 0 R 1329 0 R 1331 0 R 1342 0 R 1344 0 R 1349 
0 R 1353 0 R 1358 0 R 1368 0 R 1374 0 R 1380 0 R 1382 0 R 1384 0 R 1397 0 R 
1404 0 R 1413 0 R 1419 0 R 1433 0 R 1441 0 R 1445 0 R 1454 0 R 1462 0 R 1470 0 
R 1476 0 R 1480 0 R 1483 0 R 1485 0 R 1494 0 R 1497 0 R 1504 0 R 1508 0 R 1511 
0 R 1519 0 R 1523 0 R 1526 0 R 1528 0 R 1536 0 R 1543 0 R 1549 0 R 1554 0 R 
1558 0 R 1561 0 R 1567 0 R 1572 0 R 1577 0 R 1579 0 R 1581 0 R 1584 0 R 1586 0 
R 1595 0 R 1598 0 R 1604 0 R 1611 0 R 1615 0 R 1621 0 R 1624 0 R 1626 0 R 1631 
0 R 1634 0 R 1636 0 R 1638 0 R 1640 0 R 1647 0 R 1657 0 R 1662 0 R 1669 0 R 
1673 0 R 1675 0 R 1677 0 R 1679 0 R 1682 0 R 1684 0 R 1686 0 R 1688 0 R 1692 0 
R 1696 0 R 1705 0 R 1707 0 R 1709 0 R 1711 0 R 1713 0 R 1719 0 R 1721 0 R 1726 
0 R 1728 0 R 1730 0 R 1737 0 R 1742 0 R 1746 0 R 1750 0 R 1753 0 R 1756 0 R 
1760 0 R 1762 0 R 1765 0 R 1767 0 R 1769 0 R 1771 0 R 1775 0 R
  1777 0 R 1781 0 R 1783 0 R 1785 0 R 1787 0 R 1789 0 R 1793 0 R 1796 0 R 1798 
0 R 1800 0 R 1808 0 R 1818 0 R 1821 0 R 1837 0 R 1852 0 R 1856 0 R 1861 0 R 
1864 0 R 1867 0 R 1872 0 R 1874 0 R 1881 0 R 1883 0 R 1886 0 R 1888 0 R 1890 0 
R 1892 0 R 1894 0 R 1898 0 R 1900 0 R 1909 0 R 1916 0 R 1922 0 R 1934 0 R 1948 
0 R 1959 0 R 1978 0 R 1980 0 R 1982 0 R 1986 0 R 2003 0 R 2011 0 R 2018 0 R 
2027 0 R 2032 0 R 2041 0 R 2052 0 R 2058 0 R 2067 0 R 2080 0 R 2097 0 R 2107 0 
R 2110 0 R 2119 0 R 2134 0 R 2141 0 R 2144 0 R 2149 0 R 2154 0 R 2164 0 R 2172 
0 R 2175 0 R 2177 0 R 2181 0 R 2194 0 R 2202 0 R 2208 0 R 2212 0 R 2215 0 R 
2217 0 R 2219 0 R 2221 0 R 2223 0 R 2228 0 R 2230 0 R 2240 0 R 2250 0 R 2257 0 
R 2269 0 R 2274 0 R 2278 0 R 2290 0 R 2297 0 R 2303 0 R 2305 0 R 2316 0 R 2323 
0 R 2334 0 R 2338 0 R 2347 0 R 2354 0 R 2364 0 R 2372 0 R 2381 0 R 2387 0 R 
2392 0 R 2397 0 R 2400 0 R 2402 0 R 2408 0 R 2412 0 R 2416 0 R 2422 0 R 2429 0 
R 2434 0 R 2438 0 R 2447 0 R 2452 0 R 2457 0 R 2470 0 R 2477 0
  R 2481 0 R 2486 0 R 249

[49/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apache_hbase_reference_guide.pdfmarks
--
diff --git a/apache_hbase_reference_guide.pdfmarks 
b/apache_hbase_reference_guide.pdfmarks
index 4a50eb9..4ae0ec4 100644
--- a/apache_hbase_reference_guide.pdfmarks
+++ b/apache_hbase_reference_guide.pdfmarks
@@ -2,8 +2,8 @@
   /Author (Apache HBase Team)
   /Subject ()
   /Keywords ()
-  /ModDate (D:2016070710)
-  /CreationDate (D:2016070710)
+  /ModDate (D:20160712144733)
+  /CreationDate (D:20160712144733)
   /Creator (Asciidoctor PDF 1.5.0.alpha.6, based on Prawn 1.2.1)
   /Producer ()
   /DOCINFO pdfmark

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/constant-values.html
--
diff --git a/apidocs/constant-values.html b/apidocs/constant-values.html
index 5917519..b3b91dd 100644
--- a/apidocs/constant-values.html
+++ b/apidocs/constant-values.html
@@ -3058,6 +3058,34 @@
 
 
 
+org.apache.hadoop.hbase.io.TimeRange 
+
+Modifier and Type
+Constant Field
+Value
+
+
+
+
+
+public static final long
+INITIAL_MAX_TIMESTAMP
+9223372036854775807L
+
+
+
+
+public static final long
+INITIAL_MIN_TIMESTAMP
+0L
+
+
+
+
+
+
+
+
 org.apache.hadoop.hbase.io.crypto.KeyProvider 
 
 Modifier and Type

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/deprecated-list.html
--
diff --git a/apidocs/deprecated-list.html b/apidocs/deprecated-list.html
index 87379d6..47929fe 100644
--- a/apidocs/deprecated-list.html
+++ b/apidocs/deprecated-list.html
@@ -152,14 +152,20 @@
 
 
 
-org.apache.hadoop.hbase.client.Scan.SCAN_ATTRIBUTES_METRICS_DATA
+org.apache.hadoop.hbase.HConstants.OLDEST_TIMESTAMP
+Should not be public since hbase-1.3.0. For internal use 
only. Move internal to
+ Scanners flagged as special timestamp value never to be returned as timestamp 
on a Cell.
+
 
 
+org.apache.hadoop.hbase.client.Scan.SCAN_ATTRIBUTES_METRICS_DATA
+
+
 org.apache.hadoop.hbase.client.Scan.SCAN_ATTRIBUTES_METRICS_ENABLE
 since 1.0.0. Use Scan.setScanMetricsEnabled(boolean)
 
 
-
+
 org.apache.hadoop.hbase.mapreduce.SimpleTotalOrderPartitioner.START
 
 
@@ -455,6 +461,31 @@
 org.apache.hadoop.hbase.HTableDescriptor(String)
 
 
+org.apache.hadoop.hbase.io.TimeRange()
+This is made @InterfaceAudience.Private in the 2.0 line 
and above
+
+
+
+org.apache.hadoop.hbase.io.TimeRange(byte[])
+This is made @InterfaceAudience.Private in the 2.0 line 
and above
+
+
+
+org.apache.hadoop.hbase.io.TimeRange(byte[],
 byte[])
+This is made @InterfaceAudience.Private in the 2.0 line 
and above
+
+
+
+org.apache.hadoop.hbase.io.TimeRange(long)
+This is made @InterfaceAudience.Private in the 2.0 line 
and above
+
+
+
+org.apache.hadoop.hbase.io.TimeRange(long,
 long)
+This is made @InterfaceAudience.Private in the 2.0 line 
and above
+
+
+
 org.apache.hadoop.hbase.client.UnmodifyableHTableDescriptor()
 As of release 2.0.0. This will be removed in HBase 3.0.0.
   Use UnmodifyableHTableDescriptor.UnmodifyableHTableDescriptor(HTableDescriptor).

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/index-all.html
--
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index ac0ca75..949b813 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
@@ -1328,7 +1328,7 @@
  
 compare(long)
 - Method in class org.apache.hadoop.hbase.io.TimeRange
 
-Compare the timestamp to timerange
+Compare the timestamp to timerange.
 
 compare(byte[],
 byte[]) - Method in class org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator
  
@@ -7298,6 +7298,10 @@
 Obtain an authentication token, for the specified cluster, 
on behalf of the current user
  and add it to the credentials for the given map reduce job.
 
+INITIAL_MAX_TIMESTAMP
 - Static variable in class org.apache.hadoop.hbase.io.TimeRange
+ 
+INITIAL_MIN_TIMESTAMP
 - Static variable in class org.apache.hadoop.hbase.io.TimeRange
+ 
 initialChore()
 - Method in class org.apache.hadoop.hbase.ScheduledChore
 
 Override to run a task before we start looping.
@@ -9256,7 +9260,10 @@
  
 OLDEST_TIMESTAMP
 - Static variable in class org.apache.hadoop.hbase.HConstants
 
-Timestamp to use when we want to refer to the oldest 
cell.
+Deprecated.
+Should not be public since hbase-1.3.0. For internal use 
only. Move internal to
+ Scanners flagged as special timestamp value never to be returned as timestamp 
on a Cell.
+
 
 onChoreMissedStartTime(ScheduledChore)
 - Method in class org.apache.hadoop.hbase.ChoreService
  
@@ -13412,23 +13419,33 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 TimeRange()
 - Constructor for class org.apache.hadoop.hbase.io.TimeRange
 
-Default constructor.
+Deprecated.
+This is made @InterfaceAudience.Private in t

[28/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
index 9a59883..ee7979e 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HTableDescriptor.html
@@ -564,24 +564,24 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 HTableDescriptor
-Table.getTableDescriptor()
-Gets the table descriptor for 
this table.
-
-
-
-HTableDescriptor
 HTable.getTableDescriptor()
 Gets the table descriptor for 
this table.
 
 
-
+
 protected HTableDescriptor
 HBaseAdmin.CreateTableFuture.getTableDescriptor() 
 
-
+
 protected HTableDescriptor
 HBaseAdmin.TableFuture.getTableDescriptor() 
 
+
+HTableDescriptor
+Table.getTableDescriptor()
+Gets the table descriptor for 
this table.
+
+
 
 HTableDescriptor
 HTableWrapper.getTableDescriptor() 
@@ -981,170 +981,182 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 void
-MasterObserver.postCloneSnapshot(ObserverContext ctx,
+BaseMasterAndRegionObserver.postCloneSnapshot(ObserverContext ctx,
   
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot,
-  HTableDescriptor hTableDescriptor)
-Called after a snapshot clone operation has been 
requested.
-
+  HTableDescriptor hTableDescriptor) 
 
 
 void
-BaseMasterAndRegionObserver.postCloneSnapshot(ObserverContext ctx,
+BaseMasterObserver.postCloneSnapshot(ObserverContext ctx,
   
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot,
   HTableDescriptor hTableDescriptor) 
 
 
 void
-BaseMasterObserver.postCloneSnapshot(ObserverContext ctx,
+MasterObserver.postCloneSnapshot(ObserverContext ctx,
   
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot,
-  HTableDescriptor hTableDescriptor) 
+  HTableDescriptor hTableDescriptor)
+Called after a snapshot clone operation has been 
requested.
+
 
 
 void
-MasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
+BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContext ctx,
 HTableDescriptor desc,
-HRegionInfo[] regions)
-Called after the createTable operation has been 
requested.
-
+HRegionInfo[] regions) 
 
 
 void
-BaseMasterAndRegionObserver.postCompletedCreateTableAction(ObserverContext ctx,
+BaseMasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
 HTableDescriptor desc,
 HRegionInfo[] regions) 
 
 
 void
-BaseMasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
+MasterObserver.postCompletedCreateTableAction(ObserverContext ctx,
 HTableDescriptor desc,
-HRegionInfo[] regions) 
+HRegionInfo[] regions)
+Called after the createTable operation has been 
requested.
+
 
 
 void
-MasterObserver.postCompletedModifyTableAction(ObserverContext ctx,
+BaseMasterAndRegionObserver.postCompletedModifyTableAction(ObserverContext ctx,
 TableName tableName,
-HTableDescriptor htd)
-Called after to modifying a table's properties.
-
+HTableDescriptor htd) 
 
 
 void
-BaseMasterAndRegionObserver.postCompletedModifyTableAction(ObserverContext ctx,
+BaseMasterObserver.postCompletedModifyTableAction(ObserverContext ctx,
 TableName tableName,
 HTabl

[41/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html
index c186ad9..ecf5411 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Query.html
@@ -211,9 +211,7 @@
 203  public Map 
getColumnFamilyTimeRange() {
 204return this.colFamTimeRangeMap;
 205  }
-206
-207
-208}
+206}
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/src-html/org/apache/hadoop/hbase/io/TimeRange.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/io/TimeRange.html 
b/apidocs/src-html/org/apache/hadoop/hbase/io/TimeRange.html
index bb93e09..c42d354 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/io/TimeRange.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/io/TimeRange.html
@@ -7,215 +7,234 @@
 
 
 001/*
-002 *
-003 * Licensed to the Apache Software 
Foundation (ASF) under one
-004 * or more contributor license 
agreements.  See the NOTICE file
-005 * distributed with this work for 
additional information
-006 * regarding copyright ownership.  The 
ASF licenses this file
-007 * to you under the Apache License, 
Version 2.0 (the
-008 * "License"); you may not use this file 
except in compliance
-009 * with the License.  You may obtain a 
copy of the License at
-010 *
-011 * 
http://www.apache.org/licenses/LICENSE-2.0
-012 *
-013 * Unless required by applicable law or 
agreed to in writing, software
-014 * distributed under the License is 
distributed on an "AS IS" BASIS,
-015 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
-016 * See the License for the specific 
language governing permissions and
-017 * limitations under the License.
-018 */
-019
-020package org.apache.hadoop.hbase.io;
-021
-022import java.io.IOException;
-023
-024import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-025import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-026import 
org.apache.hadoop.hbase.util.Bytes;
-027
-028/**
-029 * Represents an interval of version 
timestamps.
-030 * 

-031 * Evaluated according to minStamp <= timestamp < maxStamp -032 * or [minStamp,maxStamp) in interval notation. -033 *

-034 * Only used internally; should not be accessed directly by clients. -035 */ -036@InterfaceAudience.Public -037@InterfaceStability.Stable -038public class TimeRange { -039 static final long INITIAL_MIN_TIMESTAMP = 0l; -040 private static final long MIN_TIME = INITIAL_MIN_TIMESTAMP; -041 static final long INITIAL_MAX_TIMESTAMP = Long.MAX_VALUE; -042 static final long MAX_TIME = INITIAL_MAX_TIMESTAMP; -043 private long minStamp = MIN_TIME; -044 private long maxStamp = MAX_TIME; +002 * Licensed to the Apache Software Foundation (ASF) under one +003 * or more contributor license agreements. See the NOTICE file +004 * distributed with this work for additional information +005 * regarding copyright ownership. The ASF licenses this file +006 * to you under the Apache License, Version 2.0 (the +007 * "License"); you may not use this file except in compliance +008 * with the License. You may obtain a copy of the License at +009 * +010 * http://www.apache.org/licenses/LICENSE-2.0 +011 * +012 * Unless required by applicable law or agreed to in writing, software +013 * distributed under the License is distributed on an "AS IS" BASIS, +014 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +015 * See the License for the specific language governing permissions and +016 * limitations under the License. +017 */ +018 +019package org.apache.hadoop.hbase.io; +020 +021import java.io.IOException; +022 +023import org.apache.hadoop.hbase.classification.InterfaceAudience; +024import org.apache.hadoop.hbase.classification.InterfaceStability; +025import org.apache.hadoop.hbase.util.Bytes; +026 +027/** +028 * Represents an interval of version timestamps. Presumes timestamps between +029 * {@link #INITIAL_MIN_TIMESTAMP} and {@link #INITIAL_MAX_TIMESTAMP} only. Gets freaked out if +030 * passed a timestamp that is < {@link #INITIAL_MIN_TIMESTAMP}, +031 *

+032 * Evaluated according to minStamp <= timestamp < maxStamp +033 * or [minStamp,maxStamp) in interval notation. +034 *

+035 * Only used internally; should not be accessed directly by clients. +036 *

Immutable. Thread-safe. +037 */ +038@InterfaceAudience.Public +039@InterfaceStability.Stable +040public class TimeRange { +041 public static final long INITIAL_MIN_TIMESTAMP = 0L; +042 public static final long INITIAL_MAX_TIMESTAMP = Long.MAX_VALUE; +043 private final long minStamp; +044 private final lon


[29/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
index 7a2b4de..93ea532 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/HRegionLocation.html
@@ -266,11 +266,11 @@ service.
 
 
 protected HRegionLocation
-RegionAdminServiceCallable.location 
+AbstractRegionServerCallable.location 
 
 
 protected HRegionLocation
-AbstractRegionServerCallable.location 
+RegionAdminServiceCallable.location 
 
 
 
@@ -298,11 +298,11 @@ service.
 
 
 protected HRegionLocation
-MultiServerCallable.getLocation() 
+AbstractRegionServerCallable.getLocation() 
 
 
 protected HRegionLocation
-AbstractRegionServerCallable.getLocation() 
+MultiServerCallable.getLocation() 
 
 
 HRegionLocation
@@ -310,26 +310,26 @@ service.
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[] row)
+RegionLocator.getRegionLocation(byte[] row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[] row)
+HRegionLocator.getRegionLocation(byte[] row)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-HRegionLocator.getRegionLocation(byte[] row,
+RegionLocator.getRegionLocation(byte[] row,
   boolean reload)
 Finds the region on which the given row is being 
served.
 
 
 
 HRegionLocation
-RegionLocator.getRegionLocation(byte[] row,
+HRegionLocator.getRegionLocation(byte[] row,
   boolean reload)
 Finds the region on which the given row is being 
served.
 
@@ -399,14 +399,14 @@ service.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-HRegionLocator.getAllRegionLocations() 
-
-
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 RegionLocator.getAllRegionLocations()
 Retrieves all of the regions associated with this 
table.
 
 
+
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
+HRegionLocator.getAllRegionLocations() 
+
 
 private PairList,http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List>
 HTable.getKeysAndRegionsInRange(byte[] startKey,



[15/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
index 1f571f8..d078cfc 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/exceptions/class-use/DeserializationException.html
@@ -144,14 +144,6 @@
 
 
 
-static HTableDescriptor
-HTableDescriptor.parseFrom(byte[] bytes) 
-
-
-static ClusterId
-ClusterId.parseFrom(byte[] bytes) 
-
-
 static HColumnDescriptor
 HColumnDescriptor.parseFrom(byte[] bytes) 
 
@@ -160,6 +152,14 @@
 HRegionInfo.parseFrom(byte[] bytes) 
 
 
+static ClusterId
+ClusterId.parseFrom(byte[] bytes) 
+
+
+static HTableDescriptor
+HTableDescriptor.parseFrom(byte[] bytes) 
+
+
 static SplitLogTask
 SplitLogTask.parseFrom(byte[] data) 
 
@@ -253,145 +253,145 @@
 ByteArrayComparable.parseFrom(byte[] pbBytes) 
 
 
-static DependentColumnFilter
-DependentColumnFilter.parseFrom(byte[] pbBytes) 
+static ColumnPaginationFilter
+ColumnPaginationFilter.parseFrom(byte[] pbBytes) 
 
 
-static PrefixFilter
-PrefixFilter.parseFrom(byte[] pbBytes) 
+static InclusiveStopFilter
+InclusiveStopFilter.parseFrom(byte[] pbBytes) 
 
 
-static FamilyFilter
-FamilyFilter.parseFrom(byte[] pbBytes) 
+static FilterWrapper
+FilterWrapper.parseFrom(byte[] pbBytes) 
 
 
-static RegexStringComparator
-RegexStringComparator.parseFrom(byte[] pbBytes) 
+static BinaryComparator
+BinaryComparator.parseFrom(byte[] pbBytes) 
 
 
 static WhileMatchFilter
 WhileMatchFilter.parseFrom(byte[] pbBytes) 
 
 
-static InclusiveStopFilter
-InclusiveStopFilter.parseFrom(byte[] pbBytes) 
-
-
-static FirstKeyOnlyFilter
-FirstKeyOnlyFilter.parseFrom(byte[] pbBytes) 
-
-
-static TimestampsFilter
-TimestampsFilter.parseFrom(byte[] pbBytes) 
+static FuzzyRowFilter
+FuzzyRowFilter.parseFrom(byte[] pbBytes) 
 
 
-static Filter
-Filter.parseFrom(byte[] pbBytes)
-Concrete implementers can signal a failure condition in 
their code by throwing an
- http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException.
-
+static FirstKeyValueMatchingQualifiersFilter
+FirstKeyValueMatchingQualifiersFilter.parseFrom(byte[] pbBytes)
+Deprecated. 
+ 
 
 
 static KeyOnlyFilter
 KeyOnlyFilter.parseFrom(byte[] pbBytes) 
 
 
-static BinaryPrefixComparator
-BinaryPrefixComparator.parseFrom(byte[] pbBytes) 
+static RegexStringComparator
+RegexStringComparator.parseFrom(byte[] pbBytes) 
 
 
-static LongComparator
-LongComparator.parseFrom(byte[] pbBytes) 
+static ValueFilter
+ValueFilter.parseFrom(byte[] pbBytes) 
 
 
-static MultipleColumnPrefixFilter
-MultipleColumnPrefixFilter.parseFrom(byte[] pbBytes) 
+static LongComparator
+LongComparator.parseFrom(byte[] pbBytes) 
 
 
-static BitComparator
-BitComparator.parseFrom(byte[] pbBytes) 
+static PrefixFilter
+PrefixFilter.parseFrom(byte[] pbBytes) 
 
 
-static QualifierFilter
-QualifierFilter.parseFrom(byte[] pbBytes) 
+static RandomRowFilter
+RandomRowFilter.parseFrom(byte[] pbBytes) 
 
 
+static SingleColumnValueExcludeFilter
+SingleColumnValueExcludeFilter.parseFrom(byte[] pbBytes) 
+
+
 static SubstringComparator
 SubstringComparator.parseFrom(byte[] pbBytes) 
 
+
+static FilterList
+FilterList.parseFrom(byte[] pbBytes) 
+
 
-static SkipFilter
-SkipFilter.parseFrom(byte[] pbBytes) 
+static ColumnPrefixFilter
+ColumnPrefixFilter.parseFrom(byte[] pbBytes) 
 
 
-static ColumnCountGetFilter
-ColumnCountGetFilter.parseFrom(byte[] pbBytes) 
+static TimestampsFilter
+TimestampsFilter.parseFrom(byte[] pbBytes) 
 
 
-static RandomRowFilter
-RandomRowFilter.parseFrom(byte[] pbBytes) 
+static RowFilter
+RowFilter.parseFrom(byte[] pbBytes) 
 
 
-static SingleColumnValueExcludeFilter
-SingleColumnValueExcludeFilter.parseFrom(byte[] pbBytes) 
+static SkipFilter
+SkipFilter.parseFrom(byte[] pbBytes) 
 
 
-static FuzzyRowFilter
-FuzzyRowFilter.parseFrom(byte[] pbBytes) 
+static BinaryPrefixComparator
+BinaryPrefixComparator.parseFrom(byte[] pbBytes) 
 
 
-static SingleColumnValueFilter
-SingleColumnValueFilter.parseFrom(byte[] pbBytes) 
+static NullComparator
+NullComparator.parseFrom(byte[] pbBytes) 
 
 
-static FilterList
-FilterList.parseFrom(byte[] pbBytes) 
+static SingleColumnValueFilter
+SingleColumnValueFilter.parseFrom(byte[] pbBytes) 
 
 
-static ColumnRangeFilter
-ColumnRangeFilter.parseFrom(byte[] pbBytes) 
+static FirstKeyOnlyFilter
+FirstKeyOnlyFilter.parseFrom(byte[] pbBytes) 
 
 
-static MultiRowRangeFilter
-MultiRowRangeFilter.parseFrom(byte[] pbBytes) 
+static ColumnRangeFilter
+ColumnRangeFilter.parseFrom(byte[] pbBytes) 
 
 
-static FirstKeyValueMatchingQualifiersFilter
-FirstKeyValueMatchingQualifi

[47/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html 
b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
index b9fd2a6..a420a89 100644
--- a/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
+++ b/apidocs/org/apache/hadoop/hbase/KeepDeletedCells.html
@@ -249,7 +249,7 @@ the order they are declared.
 
 
 values
-public static KeepDeletedCells[] values()
+public static KeepDeletedCells[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -266,7 +266,7 @@ for (KeepDeletedCells c : KeepDeletedCells.values())
 
 
 valueOf
-public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+public static KeepDeletedCells valueOf(http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 6b18f9a..3e920b6 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -919,23 +919,23 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-Put
-Put.add(Cell kv)
-Add the specified KeyValue to this Put operation.
-
-
-
 Append
 Append.add(Cell cell)
 Add column and value to this Append operation.
 
 
-
+
 Increment
 Increment.add(Cell cell)
 Add the specified KeyValue to this operation.
 
 
+
+Put
+Put.add(Cell kv)
+Add the specified KeyValue to this Put operation.
+
+
 
 Delete
 Delete.addDeleteMarker(Cell kv)
@@ -1013,27 +1013,27 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 boolean partial) 
 
 
-Put
-Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
-
 Append
 Append.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
-
+
 Increment
 Increment.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
 
+
+Put
+Put.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
 
+Delete
+Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
+
+
 Mutation
 Mutation.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map)
 Method for setting the put's familyMap
 
 
-
-Delete
-Delete.setFamilyCellMap(http://docs.oracle.com/javase/7/docs/api/java/util/NavigableMap.html?is-external=true";
 title="class or interface in java.util">NavigableMapList> map) 
-
 
 
 
@@ -1050,52 +1050,54 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 
 
 Cell
-MultiRowRangeFilter.getNextCellHint(Cell currentKV) 
+TimestampsFilter.getNextCellHint(Ce

[27/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
index 1689c47..38c22d9 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/class-use/InterProcessLock.MetadataHandler.html
@@ -166,13 +166,13 @@
 
 
 
-protected InterProcessLock.MetadataHandler
-ZKInterProcessLockBase.handler 
-
-
 private InterProcessLock.MetadataHandler
 ZKInterProcessReadWriteLock.handler 
 
+
+protected InterProcessLock.MetadataHandler
+ZKInterProcessLockBase.handler 
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
index 344c681..4cec977 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/KeyValue.html
@@ -785,22 +785,28 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
+DefaultStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey) 
+
+
+http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 StoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey)
 Gets initial, full list of candidate store files to check 
for row-key-before.
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 StripeStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey)
 See StoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue)
  for details on this methods.
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-DefaultStoreFileManager.getCandidateFilesForRowKeyBefore(KeyValue targetKey) 
+DefaultStoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
+  KeyValue targetKey,
+  Cell candidate) 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 StoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
   KeyValue targetKey,
@@ -808,7 +814,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 Updates the candidate list for finding row key before.
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
 StripeStoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
   KeyValue targetKey,
@@ -818,12 +824,6 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
  for details on this methods.
 
 
-
-http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator
-DefaultStoreFileManager.updateCandidateFilesForRowKeyBefore(http://docs.oracle.com/javase/7/docs/api/java/util/Iterator.html?is-external=true";
 title="class or interface in java.util">Iterator candidateFiles,
-  KeyValue targetKey,
-  Cell candidate) 
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/NamespaceDescriptor.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/class-use/Name

[21/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
index fced4aa..79477f4 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
@@ -381,13 +381,13 @@ service.
 
 
 Result
-Table.append(Append append)
+HTable.append(Append append)
 Appends values to one or more columns within a single 
row.
 
 
 
 Result
-HTable.append(Append append)
+Table.append(Append append)
 Appends values to one or more columns within a single 
row.
 
 
@@ -404,16 +404,16 @@ service.
 
 
 
-Result
-RpcRetryingCallerWithReadReplicas.ReplicaRegionServerCallable.call(int callTimeout) 
+Result[]
+ScannerCallable.call(int callTimeout) 
 
 
-Result[]
-ClientSmallScanner.SmallScannerCallable.call(int timeout) 
+Result
+RpcRetryingCallerWithReadReplicas.ReplicaRegionServerCallable.call(int callTimeout) 
 
 
 Result[]
-ScannerCallable.call(int callTimeout) 
+ClientSmallScanner.SmallScannerCallable.call(int timeout) 
 
 
 Result[]
@@ -480,13 +480,13 @@ service.
 
 
 Result
-Table.get(Get get)
+HTable.get(Get get)
 Extracts certain cells from a given row.
 
 
 
 Result
-HTable.get(Get get)
+Table.get(Get get)
 Extracts certain cells from a given row.
 
 
@@ -501,13 +501,13 @@ service.
 
 
 Result[]
-Table.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
+HTable.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
 Extracts certain cells from the given rows, in batch.
 
 
 
 Result[]
-HTable.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
+Table.get(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List gets)
 Extracts certain cells from the given rows, in batch.
 
 
@@ -517,13 +517,13 @@ service.
 
 
 Result
-Table.increment(Increment increment)
+HTable.increment(Increment increment)
 Increments one or more columns within a single row.
 
 
 
 Result
-HTable.increment(Increment increment)
+Table.increment(Increment increment)
 Increments one or more columns within a single row.
 
 
@@ -537,11 +537,13 @@ service.
 
 
 Result
-ClientSmallReversedScanner.next() 
+ResultScanner.next()
+Grab the next row's worth of values.
+
 
 
 Result
-ClientSmallScanner.next() 
+ClientSmallReversedScanner.next() 
 
 
 Result
@@ -549,27 +551,25 @@ service.
 
 
 Result
-ResultScanner.next()
-Grab the next row's worth of values.
-
+ClientSmallScanner.next() 
 
 
 Result
-ClientSideRegionScanner.next() 
+TableSnapshotScanner.next() 
 
 
 Result
-TableSnapshotScanner.next() 
+ClientSideRegionScanner.next() 
 
 
 Result[]
-AbstractClientScanner.next(int nbRows)
-Get nbRows rows.
-
+ResultScanner.next(int nbRows) 
 
 
 Result[]
-ResultScanner.next(int nbRows) 
+AbstractClientScanner.next(int nbRows)
+Get nbRows rows.
+
 
 
 protected Result
@@ -914,11 +914,9 @@ service.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+  
org.apache.hadoop.mapred.Reporter reporter) 
 
 
 org.apache.hadoop.mapred.RecordReader
@@ -928,9 +926,11 @@ service.
 
 
 org.apache.hadoop.mapred.RecordReader
-MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter) 
+  
org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 
@@ -949,28 +949,28 @@ service.
 
 
 void
-GroupingTableMap.map(ImmutableBytesWritable key,
+IdentityTableMap.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapred.OutputCollector output,
   org.apache.hadoop.mapred.Reporter reporter)
-Extract the grouping columns from value to construct a new 
key.
+Pass the key, value to reduce
 
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-  Result values,
+GroupingTableM

[19/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html 
b/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html
index 3ddf9ee..2d9959a 100644
--- a/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html
+++ b/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Decoder.html
@@ -147,15 +147,15 @@
 
 
 Codec.Decoder
-CellCodec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer buf) 
+CellCodecWithTags.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer buf) 
 
 
 Codec.Decoder
-Codec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer buf) 
+CellCodec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer buf) 
 
 
 Codec.Decoder
-CellCodecWithTags.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer buf) 
+Codec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/nio/ByteBuffer.html?is-external=true";
 title="class or interface in 
java.nio">ByteBuffer buf) 
 
 
 Codec.Decoder
@@ -173,15 +173,15 @@
 
 
 Codec.Decoder
-CellCodec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
+CellCodecWithTags.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
 
 
 Codec.Decoder
-Codec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
+CellCodec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
 
 
 Codec.Decoder
-CellCodecWithTags.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
+Codec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
 
 
 Codec.Decoder
@@ -243,11 +243,11 @@
 
 
 Codec.Decoder
-WALCellCodec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
+SecureWALCellCodec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
 
 
 Codec.Decoder
-SecureWALCellCodec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
+WALCellCodec.getDecoder(http://docs.oracle.com/javase/7/docs/api/java/io/InputStream.html?is-external=true";
 title="class or interface in 
java.io">InputStream is) 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Encoder.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Encoder.html 
b/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Encoder.html
index 3ee0e94..afe41fd 100644
--- a/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Encoder.html
+++ b/devapidocs/org/apache/hadoop/hbase/codec/class-use/Codec.Encoder.html
@@ -139,15 +139,15 @@
 
 
 Codec.Encoder
-CellCodec.getEncoder(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true";
 title="class or interface in 
java.io">OutputStream os) 
+CellCodecWithTags.getEncoder(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true";
 title="class or interface in 
java.io">OutputStream os) 
 
 
 Codec.Encoder
-Codec.getEncoder(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true";
 title="class or interface in 
java.io">OutputStream os) 
+CellCodec.getEncoder(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true";
 title="class or interface in 
java.io">OutputStream os) 
 
 
 Codec.Encoder
-CellCodecWithTags.getEncoder(http://docs.oracle.com/javase/7/docs/api/java/io/OutputStream.html?is-external=true";
 title="class or interface in 
java.io">OutputStream os) 
+Codec.getEncoder(http://docs.oracle.com/javase/7/docs/ap

[08/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html 
b/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html
index 50e469a..c350b87 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/class-use/HMaster.html
@@ -196,31 +196,31 @@
 
 
 private HMaster
-MetricsMasterWrapperImpl.master 
+ExpiredMobFileCleanerChore.master 
 
 
 private HMaster
-MasterRpcServices.master 
+HMaster.InitializationMonitor.master 
 
 
 private HMaster
-ExpiredMobFileCleanerChore.master 
+ClusterStatusPublisher.master 
 
 
 private HMaster
-MasterMobCompactionThread.master 
+MetricsMasterWrapperImpl.master 
 
 
 private HMaster
-HMaster.InitializationMonitor.master 
+MasterMobCompactionThread.master 
 
 
 private HMaster
-ClusterStatusPublisher.master 
+MobCompactionChore.master 
 
 
 private HMaster
-MobCompactionChore.master 
+MasterRpcServices.master 
 
 
 private HMaster
@@ -432,15 +432,15 @@
 
 
 private HMaster
-BackupMasterStatusTmpl.ImplData.m_master 
+MasterStatusTmpl.ImplData.m_master 
 
 
 private HMaster
-MasterStatusTmpl.ImplData.m_master 
+RegionServerListTmpl.ImplData.m_master 
 
 
 private HMaster
-RegionServerListTmpl.ImplData.m_master 
+BackupMasterStatusTmpl.ImplData.m_master 
 
 
 private HMaster
@@ -448,11 +448,11 @@
 
 
 private HMaster
-MasterStatusTmplImpl.master 
+BackupMasterStatusTmplImpl.master 
 
 
 private HMaster
-BackupMasterStatusTmplImpl.master 
+MasterStatusTmplImpl.master 
 
 
 
@@ -465,15 +465,15 @@
 
 
 HMaster
-BackupMasterStatusTmpl.ImplData.getMaster() 
+MasterStatusTmpl.ImplData.getMaster() 
 
 
 HMaster
-MasterStatusTmpl.ImplData.getMaster() 
+RegionServerListTmpl.ImplData.getMaster() 
 
 
 HMaster
-RegionServerListTmpl.ImplData.getMaster() 
+BackupMasterStatusTmpl.ImplData.getMaster() 
 
 
 
@@ -486,57 +486,57 @@
 
 
 org.jamon.Renderer
-BackupMasterStatusTmpl.makeRenderer(HMaster master) 
+MasterStatusTmpl.makeRenderer(HMaster master) 
 
 
 org.jamon.Renderer
-MasterStatusTmpl.makeRenderer(HMaster master) 
+RegionServerListTmpl.makeRenderer(HMaster master) 
 
 
 org.jamon.Renderer
-RegionServerListTmpl.makeRenderer(HMaster master) 
+BackupMasterStatusTmpl.makeRenderer(HMaster master) 
 
 
 void
-BackupMasterStatusTmpl.render(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true";
 title="class or interface in java.io">Writer jamonWriter,
+MasterStatusTmpl.render(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true";
 title="class or interface in java.io">Writer jamonWriter,
 HMaster master) 
 
 
 void
-MasterStatusTmpl.render(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true";
 title="class or interface in java.io">Writer jamonWriter,
+RegionServerListTmpl.render(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true";
 title="class or interface in java.io">Writer jamonWriter,
 HMaster master) 
 
 
 void
-RegionServerListTmpl.render(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true";
 title="class or interface in java.io">Writer jamonWriter,
+BackupMasterStatusTmpl.render(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true";
 title="class or interface in java.io">Writer jamonWriter,
 HMaster master) 
 
 
 void
-BackupMasterStatusTmpl.renderNoFlush(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true";
 title="class or interface in java.io">Writer jamonWriter,
+MasterStatusTmpl.renderNoFlush(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true";
 title="class or interface in java.io">Writer jamonWriter,
   HMaster master) 
 
 
 void
-MasterStatusTmpl.renderNoFlush(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true";
 title="class or interface in java.io">Writer jamonWriter,
+RegionServerListTmpl.renderNoFlush(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true";
 title="class or interface in java.io">Writer jamonWriter,
   HMaster master) 
 
 
 void
-RegionServerListTmpl.renderNoFlush(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true";
 title="class or interface in java.io">Writer jamonWriter,
+BackupMasterStatusTmpl.renderNoFlush(http://docs.oracle.com/javase/7/docs/api/java/io/Writer.html?is-external=true";
 title="class or interface in java.io">Writer jamonWriter,
   HMaster master) 
 
 
 void
-BackupMasterStatusTmpl.ImplData.setMaster(HMaster master) 
+MasterStatusTmpl.ImplData.setMaster(HMaster master) 
 
 
 void
-MasterStatusTmpl.ImplData.setMaster(HMaster master) 
+RegionServerListTmpl.ImplData

[05/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
index dd93329..5d385bf 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/TableProcedureInterface.TableOperationType.html
@@ -96,11 +96,11 @@
 
 
 TableProcedureInterface.TableOperationType
-DeleteColumnFamilyProcedure.getTableOperationType() 
+CreateNamespaceProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-CreateNamespaceProcedure.getTableOperationType() 
+EnableTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
@@ -108,53 +108,53 @@
 
 
 TableProcedureInterface.TableOperationType
-CloneSnapshotProcedure.getTableOperationType() 
+DeleteColumnFamilyProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-TableProcedureInterface.getTableOperationType()
-Given an operation type we can take decisions about what to 
do with pending operations.
-
+CreateTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-DeleteTableProcedure.getTableOperationType() 
+ModifyTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-CreateTableProcedure.getTableOperationType() 
+DisableTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-EnableTableProcedure.getTableOperationType() 
+AddColumnFamilyProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-TruncateTableProcedure.getTableOperationType() 
+CloneSnapshotProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-ModifyColumnFamilyProcedure.getTableOperationType() 
+DeleteTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-RestoreSnapshotProcedure.getTableOperationType() 
+ModifyNamespaceProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-ModifyTableProcedure.getTableOperationType() 
+ModifyColumnFamilyProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-ModifyNamespaceProcedure.getTableOperationType() 
+TableProcedureInterface.getTableOperationType()
+Given an operation type we can take decisions about what to 
do with pending operations.
+
 
 
 TableProcedureInterface.TableOperationType
-DisableTableProcedure.getTableOperationType() 
+TruncateTableProcedure.getTableOperationType() 
 
 
 TableProcedureInterface.TableOperationType
-AddColumnFamilyProcedure.getTableOperationType() 
+RestoreSnapshotProcedure.getTableOperationType() 
 
 
 static TableProcedureInterface.TableOperationType

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
index 7767a11..aa572b3 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
@@ -152,8 +152,8 @@
 
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.master.procedure.TableProcedureInterface.TableOperationType
 org.apache.hadoop.hbase.master.procedure.ServerProcedureInterface.ServerOperationType
+org.apache.hadoop.hbase.master.procedure.TableProcedureInterface.TableOperationType
 org.apache.hadoop.hbase.master.procedure.DisableTableProcedure.MarkRegionOfflineOpResult
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/master/snapshot/class-use/SnapshotManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/snapshot/class-use/SnapshotManager.html
 
b/devapidocs/org/apache/hadoop/hbase/master/snapshot/class-use/SnapshotManager.html
index d0ec8d7..8628091 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/snapshot/cl

[11/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
index 62df01b..5de8029 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/class-use/Cacheable.html
@@ -168,35 +168,35 @@
 
 
 Cacheable
-InclusiveCombinedBlockCache.getBlock(BlockCacheKey cacheKey,
+LruBlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
 boolean repeat,
-boolean updateCacheMetrics) 
+boolean updateCacheMetrics)
+Get the buffer of the block with the specified name.
+
 
 
 Cacheable
-CombinedBlockCache.getBlock(BlockCacheKey cacheKey,
+BlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
 boolean repeat,
-boolean updateCacheMetrics) 
+boolean updateCacheMetrics)
+Fetch block from cache.
+
 
 
 Cacheable
-LruBlockCache.getBlock(BlockCacheKey cacheKey,
+CombinedBlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
 boolean repeat,
-boolean updateCacheMetrics)
-Get the buffer of the block with the specified name.
-
+boolean updateCacheMetrics) 
 
 
 Cacheable
-BlockCache.getBlock(BlockCacheKey cacheKey,
+InclusiveCombinedBlockCache.getBlock(BlockCacheKey cacheKey,
 boolean caching,
 boolean repeat,
-boolean updateCacheMetrics)
-Fetch block from cache.
-
+boolean updateCacheMetrics) 
 
 
 Cacheable
@@ -245,23 +245,23 @@
 
 
 void
-CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
-Cacheable buf) 
-
-
-void
 LruBlockCache.cacheBlock(BlockCacheKey cacheKey,
 Cacheable buf)
 Cache the block with the specified name and buffer.
 
 
-
+
 void
 BlockCache.cacheBlock(BlockCacheKey cacheKey,
 Cacheable buf)
 Add block to cache (defaults to not in-memory).
 
 
+
+void
+CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
+Cacheable buf) 
+
 
 void
 MemcachedBlockCache.cacheBlock(BlockCacheKey cacheKey,
@@ -269,35 +269,35 @@
 
 
 void
-InclusiveCombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
+LruBlockCache.cacheBlock(BlockCacheKey cacheKey,
 Cacheable buf,
 boolean inMemory,
-boolean cacheDataInL1) 
+boolean cacheDataInL1)
+Cache the block with the specified name and buffer.
+
 
 
 void
-CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
+BlockCache.cacheBlock(BlockCacheKey cacheKey,
 Cacheable buf,
 boolean inMemory,
-boolean cacheDataInL1) 
+boolean cacheDataInL1)
+Add block to cache.
+
 
 
 void
-LruBlockCache.cacheBlock(BlockCacheKey cacheKey,
+CombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
 Cacheable buf,
 boolean inMemory,
-boolean cacheDataInL1)
-Cache the block with the specified name and buffer.
-
+boolean cacheDataInL1) 
 
 
 void
-BlockCache.cacheBlock(BlockCacheKey cacheKey,
+InclusiveCombinedBlockCache.cacheBlock(BlockCacheKey cacheKey,
 Cacheable buf,
 boolean inMemory,
-boolean cacheDataInL1)
-Add block to cache.
-
+boolean cacheDataInL1) 
 
 
 void
@@ -313,15 +313,10 @@
 
 
 void
-CombinedBlockCache.returnBlock(BlockCacheKey cacheKey,
-  Cacheable block) 
-
-
-void
 LruBlockCache.returnBlock(BlockCacheKey cacheKey,
   Cacheable block) 
 
-
+
 void
 BlockCache.returnBlock(BlockCacheKey cacheKey,
   Cacheable block)
@@ -329,6 +324,11 @@
  is over.
 
 
+
+void
+CombinedBlockCache.returnBlock(BlockCacheKey cacheKey,
+  Cacheable block) 
+
 
 void
 MemcachedBlockCache.returnBlock(BlockCacheKey cacheKey,
@@ -411,13 +411,13 @@
 
 
 Cacheable
-ByteBufferIOEngine.read(long offset,
+FileMmapEngine.read(long offset,
 int length,
 CacheableDeserializer deserializer) 
 
 
 Cacheable
-FileMmapEngine.read(long offset,
+ByteBufferIOEngine.read(long offset,
 int length,
 CacheableDeserializer deserializer) 
 
@@ -500,13 +500,13 @@
 
 
 Cacheable
-ByteBufferIOEngine.read(long offset,
+FileMmapEngine.read(long offset,
 int length,
 CacheableDeserializer deserializer) 
 
 
 Cacheable
-FileMmapEngine.read(long offset,
+ByteBufferIOEngine.read(long offset,
 int length,
 CacheableDeserializer deserializer) 

[24/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index cca6a9a..f5773c2 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -754,52 +754,52 @@ service.
 
 
 
-private TableName
-HRegionLocator.tableName 
+protected TableName
+AbstractRegionServerCallable.tableName 
 
 
-protected TableName
-RpcRetryingCallerWithReadReplicas.tableName 
+private TableName
+BufferedMutatorParams.tableName 
 
 
-private TableName
-BufferedMutatorImpl.tableName 
+protected TableName
+RpcRetryingCallerWithReadReplicas.tableName 
 
 
 private TableName
-TableState.tableName 
+AsyncProcess.AsyncRequestFutureImpl.tableName 
 
 
-private TableName
-BufferedMutatorParams.tableName 
-
-
 protected TableName
 RegionAdminServiceCallable.tableName 
 
+
+private TableName
+HRegionLocator.tableName 
+
 
 private TableName
-AsyncProcess.AsyncRequestFutureImpl.tableName 
+HTable.tableName 
 
 
 private TableName
-HTable.tableName 
+ClientScanner.tableName 
 
 
 private TableName
-ClientScanner.tableName 
+TableState.tableName 
 
 
 private TableName
-ScannerCallableWithReplicas.tableName 
+HBaseAdmin.TableFuture.tableName 
 
 
 private TableName
-HBaseAdmin.TableFuture.tableName 
+ScannerCallableWithReplicas.tableName 
 
 
-protected TableName
-AbstractRegionServerCallable.tableName 
+private TableName
+BufferedMutatorImpl.tableName 
 
 
 
@@ -833,31 +833,31 @@ service.
 
 
 TableName
-HRegionLocator.getName() 
+RegionLocator.getName()
+Gets the fully qualified table name instance of this 
table.
+
 
 
 TableName
-BufferedMutatorImpl.getName() 
+HRegionLocator.getName() 
 
 
 TableName
-BufferedMutator.getName()
-Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
-
+HTable.getName() 
 
 
 TableName
-Table.getName()
-Gets the fully qualified table name instance of this 
table.
+BufferedMutator.getName()
+Gets the fully qualified table name instance of the table 
that this BufferedMutator writes to.
 
 
 
 TableName
-HTable.getName() 
+BufferedMutatorImpl.getName() 
 
 
 TableName
-RegionLocator.getName()
+Table.getName()
 Gets the fully qualified table name instance of this 
table.
 
 
@@ -871,21 +871,21 @@ service.
 
 
 TableName
-TableState.getTableName()
-Table name for state
-
+AbstractRegionServerCallable.getTableName() 
 
 
 TableName
 BufferedMutatorParams.getTableName() 
 
 
-protected TableName
-HBaseAdmin.TableFuture.getTableName() 
+TableName
+TableState.getTableName()
+Table name for state
+
 
 
-TableName
-AbstractRegionServerCallable.getTableName() 
+protected TableName
+HBaseAdmin.TableFuture.getTableName() 
 
 
 private TableName
@@ -1007,16 +1007,16 @@ service.
 
 
 void
-ClusterConnection.cacheLocation(TableName tableName,
-  RegionLocations location) 
-
-
-void
 MetaCache.cacheLocation(TableName tableName,
   RegionLocations locations)
 Put a newly discovered HRegionLocation into the cache.
 
 
+
+void
+ClusterConnection.cacheLocation(TableName tableName,
+  RegionLocations location) 
+
 
 private void
 ConnectionImplementation.cacheLocation(TableName tableName,
@@ -1348,15 +1348,15 @@ service.
 
 
 BufferedMutator
-ConnectionImplementation.getBufferedMutator(TableName tableName) 
-
-
-BufferedMutator
 Connection.getBufferedMutator(TableName tableName)
 
  Retrieve a BufferedMutator for performing 
client-side buffering of writes.
 
 
+
+BufferedMutator
+ConnectionImplementation.getBufferedMutator(TableName tableName) 
+
 
 (package private) RegionLocations
 ConnectionImplementation.getCachedLocation(TableName tableName,
@@ -1470,36 +1470,36 @@ service.
 
 
 RegionLocator
-ConnectionImplementation.getRegionLocator(TableName tableName) 
-
-
-RegionLocator
 Connection.getRegionLocator(TableName tableName)
 Retrieve a RegionLocator implementation to inspect region 
information on a table.
 
 
-
-Table
-ConnectionImplementation.getTable(TableName tableName) 
-
 
+RegionLocator
+ConnectionImplementation.getRegionLocator(TableName tableName) 
+
+
 Table
 Connection.getTable(TableName tableName)
 Retrieve a Table implementation for accessing a table.
 
 
-
+
 Table
-ConnectionImplementation.getTable(TableName tableName,
-http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class or interface in 
java.util.concurrent">ExecutorService pool) 
+ConnectionImplementation.getTable(TableName tableName) 
 
-
+
 Table
 Connection.getTable(TableName tableName,
 http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/ExecutorService.html?is-external=true";
 title="class

[06/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
index 571e6aa..e8782e2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/class-use/MasterProcedureEnv.html
@@ -114,23 +114,23 @@
 
 
 void
-MasterObserver.preAbortProcedure(ObserverContext ctx,
+BaseMasterAndRegionObserver.preAbortProcedure(ObserverContext ctx,
   ProcedureExecutor procEnv,
-  long procId)
-Called before a abortProcedure request has been 
processed.
-
+  long procId) 
 
 
 void
-BaseMasterAndRegionObserver.preAbortProcedure(ObserverContext ctx,
+BaseMasterObserver.preAbortProcedure(ObserverContext ctx,
   ProcedureExecutor procEnv,
   long procId) 
 
 
 void
-BaseMasterObserver.preAbortProcedure(ObserverContext ctx,
+MasterObserver.preAbortProcedure(ObserverContext ctx,
   ProcedureExecutor procEnv,
-  long procId) 
+  long procId)
+Called before a abortProcedure request has been 
processed.
+
 
 
 
@@ -161,11 +161,11 @@
 
 
 ProcedureExecutor
-MasterServices.getMasterProcedureExecutor() 
+HMaster.getMasterProcedureExecutor() 
 
 
 ProcedureExecutor
-HMaster.getMasterProcedureExecutor() 
+MasterServices.getMasterProcedureExecutor() 
 
 
 
@@ -197,11 +197,11 @@
 
 
 boolean
-DeleteColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+CreateNamespaceProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-CreateNamespaceProcedure.abort(MasterProcedureEnv env) 
+EnableTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
@@ -209,59 +209,59 @@
 
 
 boolean
-CloneSnapshotProcedure.abort(MasterProcedureEnv env) 
+DeleteColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DeleteTableProcedure.abort(MasterProcedureEnv env) 
+CreateTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-CreateTableProcedure.abort(MasterProcedureEnv env) 
+ModifyTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-EnableTableProcedure.abort(MasterProcedureEnv env) 
+DisableTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-TruncateTableProcedure.abort(MasterProcedureEnv env) 
+AddColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
-boolean
-ModifyColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+protected boolean
+ServerCrashProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-RestoreSnapshotProcedure.abort(MasterProcedureEnv env) 
+CloneSnapshotProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-ModifyTableProcedure.abort(MasterProcedureEnv env) 
+DeleteTableProcedure.abort(MasterProcedureEnv env) 
 
 
-protected boolean
-ServerCrashProcedure.abort(MasterProcedureEnv env) 
+boolean
+ModifyNamespaceProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-ModifyNamespaceProcedure.abort(MasterProcedureEnv env) 
+ModifyColumnFamilyProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-DisableTableProcedure.abort(MasterProcedureEnv env) 
+TruncateTableProcedure.abort(MasterProcedureEnv env) 
 
 
 boolean
-AddColumnFamilyProcedure.abort(MasterProcedureEnv env) 
+RestoreSnapshotProcedure.abort(MasterProcedureEnv env) 
 
 
 protected boolean
-DeleteColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
+CreateNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-CreateNamespaceProcedure.acquireLock(MasterProcedureEnv env) 
+EnableTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
@@ -269,51 +269,51 @@
 
 
 protected boolean
-CloneSnapshotProcedure.acquireLock(MasterProcedureEnv env) 
+DeleteColumnFamilyProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-DeleteTableProcedure.acquireLock(MasterProcedureEnv env) 
+CreateTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-CreateTableProcedure.acquireLock(MasterProcedureEnv env) 
+ModifyTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-EnableTableProcedure.acquireLock(MasterProcedureEnv env) 
+DisableTableProcedure.acquireLock(MasterProcedureEnv env) 
 
 
 protected boolean
-TruncateTableProcedure.acquireLock(MasterProcedureEnv env) 
+AddColumnFamilyProcedure.acquireLock(Ma

[02/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
index 7d3994e..432c06e 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HStore.html
@@ -540,36 +540,46 @@ implements 
+StoreFileWriter
+createWriterInTmp(long maxKeyCount,
+  Compression.Algorithm compression,
+  boolean isCompaction,
+  boolean includeMVCCReadpoint,
+  boolean includesTag,
+  boolean shouldDropBehind,
+  TimeRangeTracker trt) 
+
+
 protected long
 delete(KeyValue kv)
 Adds a value to the memstore
 
 
-
+
 void
 deleteChangedReaderObserver(ChangedReadersObserver o) 
 
-
+
 void
 deregisterChildren(ConfigurationManager manager)
 Needs to be called to deregister the children from the 
manager.
 
 
-
+
 static long
 determineTTLFromFamily(HColumnDescriptor family) 
 
-
+
 void
 finalizeFlush()
 This method is called when it is clear that the flush to 
disk is completed.
 
 
-
+
 private void
 finishCompactionRequest(CompactionRequest cr) 
 
-
+
 protected http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in 
java.util">List
 flushCache(long logCacheFlushId,
 MemStoreSnapshot snapshot,
@@ -578,175 +588,175 @@ implements Write out current snapshot.
 
 
-
+
 long
 getAvgStoreFileAge() 
 
-
+
 long
 getBlockingFileCount()
 The number of files required before flushes for this store 
will be blocked.
 
 
-
+
 static int
 getBytesPerChecksum(org.apache.hadoop.conf.Configuration conf)
 Returns the configured bytesPerChecksum value.
 
 
-
+
 CacheConfig
 getCacheConfig()
 Used for tests.
 
 
-
+
 static ChecksumType
 getChecksumType(org.apache.hadoop.conf.Configuration conf)
 Returns the configured checksum algorithm.
 
 
-
+
 static int
 getCloseCheckInterval() 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 getColumnFamilyName() 
 
-
+
 long
 getCompactedCellsCount() 
 
-
+
 long
 getCompactedCellsSize() 
 
-
+
 long
 getCompactionCheckMultiplier() 
 
-
+
 double
 getCompactionPressure()
 This value can represent the degree of emergency of 
compaction for this store.
 
 
-
+
 CompactionProgress
 getCompactionProgress()
 getter for CompactionProgress object
 
 
-
+
 int
 getCompactPriority() 
 
-
+
 CellComparator
 getComparator() 
 
-
+
 RegionCoprocessorHost
 getCoprocessorHost() 
 
-
+
 HFileDataBlockEncoder
 getDataBlockEncoder() 
 
-
+
 HColumnDescriptor
 getFamily() 
 
-
+
 org.apache.hadoop.fs.FileSystem
 getFileSystem() 
 
-
+
 long
 getFlushableSize() 
 
-
+
 long
 getFlushedCellsCount() 
 
-
+
 long
 getFlushedCellsSize() 
 
-
+
 long
 getFlushedOutputFileSize() 
 
-
+
 HRegion
 getHRegion() 
 
-
+
 long
 getLastCompactSize() 
 
-
+
 long
 getMajorCompactedCellsCount() 
 
-
+
 long
 getMajorCompactedCellsSize() 
 
-
+
 long
 getMaxMemstoreTS() 
 
-
+
 long
 getMaxSequenceId() 
 
-
+
 long
 getMaxStoreFileAge() 
 
-
+
 MemStore
 getMemStore() 
 
-
+
 long
 getMemstoreFlushSize() 
 
-
+
 long
 getMemStoreSize() 
 
-
+
 long
 getMinStoreFileAge() 
 
-
+
 long
 getNumHFiles() 
 
-
+
 long
 getNumReferenceFiles() 
 
-
+
 protected OffPeakHours
 getOffPeakHours() 
 
-
+
 HRegionFileSystem
 getRegionFileSystem() 
 
-
+
 HRegionInfo
 getRegionInfo() 
 
-
+
 ScanInfo
 getScanInfo() 
 
-
+
 KeyValueScanner
 getScanner(Scan scan,
 http://docs.oracle.com/javase/7/docs/api/java/util/NavigableSet.html?is-external=true";
 title="class or interface in 
java.util">NavigableSet targetCols,
@@ -754,7 +764,7 @@ implements Return a scanner for both the memstore and the HStore 
files.
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 getScanners(boolean cacheBlocks,
   boolean isGet,
@@ -768,7 +778,7 @@ implements 
+
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 getScanners(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List files,
   boolean cacheBlocks,
@@ -784,53 +794,53 @@ implements 
+
 long
 getSize() 
 
-
+
 long
 getSmallestReadPoint() 
 
-
+
 long
 getSnapshotSize()
 Returns the memstore snapshot size
 
 
-
+
 byte[]
 getSplitPoint()
 Determines if Store should be split
 
 

[09/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcServerInterface.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcServerInterface.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcServerInterface.html
index 0d42775..97c8c1a 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcServerInterface.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/class-use/RpcServerInterface.html
@@ -162,13 +162,13 @@
 
 
 RpcServerInterface
-RegionServerServices.getRpcServer()
-Returns a reference to the region server's RPC server
-
+HRegionServer.getRpcServer() 
 
 
 RpcServerInterface
-HRegionServer.getRpcServer() 
+RegionServerServices.getRpcServer()
+Returns a reference to the region server's RPC server
+
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/CellCreator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/CellCreator.html 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/CellCreator.html
index b476d76..c4bdc72 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/CellCreator.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/CellCreator.html
@@ -99,15 +99,15 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private CellCreator
-TextSortReducer.kvCreator 
+PutSortReducer.kvCreator 
 
 
-protected CellCreator
-TsvImporterMapper.kvCreator 
+private CellCreator
+TextSortReducer.kvCreator 
 
 
-private CellCreator
-PutSortReducer.kvCreator 
+protected CellCreator
+TsvImporterMapper.kvCreator 
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/TableSnapshotInputFormatImpl.InputSplit.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/TableSnapshotInputFormatImpl.InputSplit.html
 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/TableSnapshotInputFormatImpl.InputSplit.html
index f53c343..f199933 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/TableSnapshotInputFormatImpl.InputSplit.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/class-use/TableSnapshotInputFormatImpl.InputSplit.html
@@ -151,16 +151,16 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 
-static http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
-TableSnapshotInputFormatImpl.getSplits(org.apache.hadoop.conf.Configuration conf) 
-
-
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 MultiTableSnapshotInputFormatImpl.getSplits(org.apache.hadoop.conf.Configuration conf)
 Return the list of splits extracted from the 
scans/snapshots pushed to conf by
  MultiTableSnapshotInputFormatImpl.setInput(org.apache.hadoop.conf.Configuration,
 java.util.Map, org.apache.hadoop.fs.Path)
 
 
+
+static http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
+TableSnapshotInputFormatImpl.getSplits(org.apache.hadoop.conf.Configuration conf) 
+
 
 static http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List
 TableSnapshotInputFormatImpl.getSplits(Scan scan,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
index 4821a4e..29ae491 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/package-tree.html
@@ -268,9 +268,9 @@
 java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/7/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/7/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.mapreduce.CellCounter.CellCounterMapper.Counters
+org.apache.hadoop.hbase.mapreduce.SyncTable.SyncMapper.Counter
 org.apache.hadoop.hbase.

[03/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMemStore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMemStore.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMemStore.html
index 4a02d8a..0d86c9b 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMemStore.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/AbstractMemStore.html
@@ -277,7 +277,8 @@ implements 
 private long
-internalAdd(Cell toAdd)
+internalAdd(Cell toAdd,
+  boolean mslabUsed)
 Internal version of add() that doesn't clone Cells with the
  allocator, and doesn't take the lock.
 
@@ -536,7 +537,7 @@ implements 
 
 upsert
-public long upsert(http://docs.oracle.com/javase/7/docs/api/java/lang/Iterable.html?is-external=true";
 title="class or interface in java.lang">Iterable cells,
+public long upsert(http://docs.oracle.com/javase/7/docs/api/java/lang/Iterable.html?is-external=true";
 title="class or interface in java.lang">Iterable cells,
   long readpoint)
 Update or insert the specified Cells.
  
@@ -563,7 +564,7 @@ implements 
 
 timeOfOldestEdit
-public long timeOfOldestEdit()
+public long timeOfOldestEdit()
 
 Specified by:
 timeOfOldestEdit in
 interface MemStore
@@ -576,7 +577,7 @@ implements 
 
 delete
-public long delete(Cell deleteCell)
+public long delete(Cell deleteCell)
 Write a delete
 
 Specified by:
@@ -591,7 +592,7 @@ implements 
 
 clearSnapshot
-public void clearSnapshot(long id)
+public void clearSnapshot(long id)
throws UnexpectedStateException
 The passed snapshot was successfully persisted; it can be 
let go.
 
@@ -608,7 +609,7 @@ implements 
 
 heapSize
-public long heapSize()
+public long heapSize()
 Get the entire heap usage for this MemStore not including 
keys in the
  snapshot.
 
@@ -624,7 +625,7 @@ implements 
 
 getSnapshotSize
-public long getSnapshotSize()
+public long getSnapshotSize()
 Description copied from interface: MemStore
 Return the size of the snapshot(s) if any
 
@@ -639,7 +640,7 @@ implements 
 
 toString
-public http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String toString()
+public http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String toString()
 
 Overrides:
 http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#toString()"
 title="class or interface in java.lang">toString in 
class http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
@@ -652,7 +653,7 @@ implements 
 
 getConfiguration
-protected org.apache.hadoop.conf.Configuration getConfiguration()
+protected org.apache.hadoop.conf.Configuration getConfiguration()
 
 
 
@@ -661,7 +662,7 @@ implements 
 
 dump
-protected void dump(org.apache.commons.logging.Log log)
+protected void dump(org.apache.commons.logging.Log log)
 
 
 
@@ -670,7 +671,7 @@ implements 
 
 upsert
-private long upsert(Cell cell,
+private long upsert(Cell cell,
   long readpoint)
 Inserts the specified Cell into MemStore and deletes any 
existing
  versions of the same row/family/qualifier as the specified Cell.
@@ -691,7 +692,7 @@ implements 
 
 getLowest
-protected Cell getLowest(Cell a,
+protected Cell getLowest(Cell a,
  Cell b)
 
 
@@ -701,7 +702,7 @@ implements 
 
 getNextRow
-protected Cell getNextRow(Cell key,
+protected Cell getNextRow(Cell key,
   http://docs.oracle.com/javase/7/docs/api/java/util/NavigableSet.html?is-external=true";
 title="class or interface in java.util">NavigableSet set)
 
 
@@ -711,7 +712,7 @@ implements 
 
 updateColumnValue
-public long updateColumnValue(byte[] row,
+public long updateColumnValue(byte[] row,
  byte[] family,
  byte[] qualifier,
  long newValue,
@@ -734,20 +735,23 @@ implements 
 
 maybeCloneWithAllocator
-private Cell maybeCloneWithAllocator(Cell cell)
+private Cell maybeCloneWithAllocator(Cell cell)
 
 
-
+
 
 
 
 
 internalAdd
-private long internalAdd(Cell toAdd)
+private long internalAdd(Cell toAdd,
+   boolean mslabUsed)
 Internal version of add() that doesn't clone Cells with the
  allocator, and doesn't take the lock.
 
  Callers should ensure they already have the read lock taken
+Parameters:toAdd - 
the cell to addmslabUsed - whether using MSLAB
+Returns:the heap size change in 
bytes
 
 
 
@@ -756,7 +760,7 @@ implements 
 
 setOldestEditTimeToNow
-private void setOldestEditTimeToNow()
+private void setOldestEditTimeToNow()
 
 
 
@@ -765,7 +769,7 @@ implements 
 
 keySize
-protected long keySize()
+protected long keySize()
 
 
 
@@ -774,7 +778,7 @@ implements 
 
 getComparator
-protecte

[10/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html
index 01724d2..0e186e9 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html
@@ -1275,7 +1275,7 @@ implements 
 
 NIO_BUFFER_LIMIT
-private static int NIO_BUFFER_LIMIT
+private static int NIO_BUFFER_LIMIT
 When the read or write buffer size is larger than this 
limit, i/o will be
  done in chunks of this size. Most RPC requests and responses would be
  be smaller.
@@ -1522,7 +1522,7 @@ implements 
 
 logResponse
-void logResponse(com.google.protobuf.Message param,
+void logResponse(com.google.protobuf.Message param,
http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String methodName,
http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String call,
http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String tag,
@@ -1546,7 +1546,7 @@ implements 
 
 stop
-public void stop()
+public void stop()
 Stops the service.  No new calls will be handled after this 
is called.
 
 Specified by:
@@ -1560,7 +1560,7 @@ implements 
 
 join
-public void join()
+public void join()
   throws http://docs.oracle.com/javase/7/docs/api/java/lang/InterruptedException.html?is-external=true";
 title="class or interface in java.lang">InterruptedException
 Wait for the server to be stopped.
  Does not wait for all subthreads to finish.
@@ -1578,7 +1578,7 @@ implements 
 
 getListenerAddress
-public http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true";
 title="class or interface in java.net">InetSocketAddress getListenerAddress()
+public http://docs.oracle.com/javase/7/docs/api/java/net/InetSocketAddress.html?is-external=true";
 title="class or interface in java.net">InetSocketAddress getListenerAddress()
 Return the socket (ip+port) on which the RPC server is 
listening to. May return null if
  the listener channel is closed.
 
@@ -1594,7 +1594,7 @@ implements 
 
 setErrorHandler
-public void setErrorHandler(HBaseRPCErrorHandler handler)
+public void setErrorHandler(HBaseRPCErrorHandler handler)
 Set the handler for calling out of RPC for error 
conditions.
 
 Specified by:
@@ -1608,7 +1608,7 @@ implements 
 
 getErrorHandler
-public HBaseRPCErrorHandler getErrorHandler()
+public HBaseRPCErrorHandler getErrorHandler()
 
 Specified by:
 getErrorHandler in
 interface RpcServerInterface
@@ -1621,7 +1621,7 @@ implements 
 
 getMetrics
-public MetricsHBaseServer getMetrics()
+public MetricsHBaseServer getMetrics()
 Returns the metrics instance for reporting RPC call 
statistics
 
 Specified by:
@@ -1635,7 +1635,7 @@ implements 
 
 addCallSize
-public void addCallSize(long diff)
+public void addCallSize(long diff)
 Description copied from interface: RpcServerInterface
 Add/subtract from the current size of all outstanding 
calls.  Called on setup of a call to add
  call total size and then again at end of a call to remove the call size.
@@ -1651,7 +1651,7 @@ implements 
 
 authorize
-public void authorize(org.apache.hadoop.security.UserGroupInformation user,
+public void authorize(org.apache.hadoop.security.UserGroupInformation user,
  
org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ConnectionHeader connection,
  http://docs.oracle.com/javase/7/docs/api/java/net/InetAddress.html?is-external=true";
 title="class or interface in java.net">InetAddress addr)
throws 
org.apache.hadoop.security.authorize.AuthorizationException
@@ -1667,7 +1667,7 @@ implements 
 
 channelWrite
-protected long channelWrite(http://docs.oracle.com/javase/7/docs/api/java/nio/channels/GatheringByteChannel.html?is-external=true";
 title="class or interface in 
java.nio.channels">GatheringByteChannel channel,
+protected long channelWrite(http://docs.oracle.com/javase/7/docs/api/java/nio/channels/GatheringByteChannel.html?is-external=true";
 title="class or interface in 
java.nio.channels">GatheringByteChannel channel,
 BufferChain bufferChain)
  throws http://docs.oracle.com/javase/7/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 This is a wrapper around http://docs.oracle.com/javase/7/docs/api/java/nio/channels/WritableByteChannel.html?is-external=true#write(java.nio.ByteBuffer)"
 title="class or interface in 
java.nio.channels">WritableByteChannel.write(java.nio.ByteBuffer).
@@ -1688,7 +1688,7 @@ implements 
 
 channelRead
-protected int channelRead(htt

[18/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
index 98007c6..b8db80e 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/class-use/MasterCoprocessorEnvironment.html
@@ -114,104 +114,114 @@
 
 
 void
-MasterObserver.postAbortProcedure(ObserverContext ctx)
-Called after a abortProcedure request has been 
processed.
-
+BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
-BaseMasterAndRegionObserver.postAbortProcedure(ObserverContext ctx) 
+BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
 
 
 void
-BaseMasterObserver.postAbortProcedure(ObserverContext ctx) 
+MasterObserver.postAbortProcedure(ObserverContext ctx)
+Called after a abortProcedure request has been 
processed.
+
 
 
 void
-MasterObserver.postAddColumn(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
-
+Deprecated. 
 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumn(ObserverContext ctx,
+BaseMasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
-Deprecated. 
+Deprecated. 
+As of release 2.0.0, this will be removed in HBase 3.0.0
+ (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
+ Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+
 
 
 
 void
-BaseMasterObserver.postAddColumn(ObserverContext ctx,
+MasterObserver.postAddColumn(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily)
 Deprecated. 
 As of release 2.0.0, this will be removed in HBase 3.0.0
  (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645).
- Use BaseMasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
+ Use MasterObserver.postAddColumnFamily(ObserverContext,
 TableName, HColumnDescriptor).
 
 
 
 
 void
-MasterObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily)
-Called after the new column family has been created.
-
+  HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterAndRegionObserver.postAddColumnFamily(ObserverContext ctx,
+BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
   HColumnDescriptor columnFamily) 
 
 
 void
-BaseMasterObserver.postAddColumnFamily(ObserverContext ctx,
+MasterObserver.postAddColumnFamily(ObserverContext ctx,
   TableName tableName,
-  HColumnDescriptor columnFamily) 
+  HColumnDescriptor columnFamily)
+Called after the new column family has been created.
+
 
 
 void
-MasterObserver.postAddColumnHandler(ObserverContext ctx,
+BaseMasterAndRegionObserver.postAddColumnHandler(ObserverContext ctx,
 TableName tableName,
 HColumnDescriptor columnFamily)
-Deprecated. 
-As of release 2.0.0, this will be removed in HBase 3.0.0
- (https://issues.apache.org/jira/browse/HBASE-13645";>HBASE-13645). Use
- MasterObserver.postCompletedAddColumnFamilyAction(ObserverContext,
 TableName, HColumnDescriptor).
-
+De

[13/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
index 7d65a2f..c8dd598 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/class-use/ImmutableBytesWritable.html
@@ -179,11 +179,9 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter)
-Builds a TableRecordReader.
-
+  
org.apache.hadoop.mapred.Reporter reporter) 
 
 
 org.apache.hadoop.mapred.RecordReader
@@ -193,9 +191,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 org.apache.hadoop.mapred.RecordReader
-MultiTableSnapshotInputFormat.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
+TableInputFormatBase.getRecordReader(org.apache.hadoop.mapred.InputSplit split,
   org.apache.hadoop.mapred.JobConf job,
-  
org.apache.hadoop.mapred.Reporter reporter) 
+  
org.apache.hadoop.mapred.Reporter reporter)
+Builds a TableRecordReader.
+
 
 
 
@@ -214,28 +214,28 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-GroupingTableMap.map(ImmutableBytesWritable key,
+IdentityTableMap.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapred.OutputCollector output,
   org.apache.hadoop.mapred.Reporter reporter)
-Extract the grouping columns from value to construct a new 
key.
+Pass the key, value to reduce
 
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-  Result values,
+GroupingTableMap.map(ImmutableBytesWritable key,
+  Result value,
   org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter) 
+  org.apache.hadoop.mapred.Reporter reporter)
+Extract the grouping columns from value to construct a new 
key.
+
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-  Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+  Result values,
   org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+  org.apache.hadoop.mapred.Reporter reporter) 
 
 
 boolean
@@ -277,28 +277,28 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 void
-GroupingTableMap.map(ImmutableBytesWritable key,
+IdentityTableMap.map(ImmutableBytesWritable key,
   Result value,
   org.apache.hadoop.mapred.OutputCollector output,
   org.apache.hadoop.mapred.Reporter reporter)
-Extract the grouping columns from value to construct a new 
key.
+Pass the key, value to reduce
 
 
 
 void
-RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
-  Result values,
+GroupingTableMap.map(ImmutableBytesWritable key,
+  Result value,
   org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter) 
+  org.apache.hadoop.mapred.Reporter reporter)
+Extract the grouping columns from value to construct a new 
key.
+
 
 
 void
-IdentityTableMap.map(ImmutableBytesWritable key,
-  Result value,
+RowCounter.RowCounterMapper.map(ImmutableBytesWritable row,
+  Result values,
   org.apache.hadoop.mapred.OutputCollector output,
-  org.apache.hadoop.mapred.Reporter reporter)
-Pass the key, value to reduce
-
+  org.apache.hadoop.mapred.Reporter reporter) 
 
 
 void
@@ -345,11 +345,11 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 private ImmutableBytesWritable
-TableRecordReaderImpl.key 
+MultithreadedTableMapper.SubMapRecordReader.key 
 
 
 private ImmutableBytesWritable
-MultithreadedTableMapper.SubMapRecordReader.key 
+TableRecordReaderImpl.key 
 
 
 private ImmutableBytesWritable
@@ -423,27 +423,27 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 ImmutableBytesWritable
-TableSnapshotInputFormatImpl.RecordReader.getCurrentKey() 
-
-
-ImmutableBytesWritable
 TableReco

[01/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site e29c39f24 -> 50e4c45bf


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.Chunk.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.Chunk.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.Chunk.html
index ebbb488..a67b2e8 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.Chunk.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/HeapMemStoreLAB.Chunk.html
@@ -99,7 +99,7 @@
 
 
 
-static class HeapMemStoreLAB.Chunk
+static class HeapMemStoreLAB.Chunk
 extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 A chunk of memory out of which allocations are sliced.
 
@@ -194,18 +194,22 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 
+(package private) int
+getNextFreeOffset() 
+
+
 void
 init()
 Actually claim the memory for this chunk.
 
 
-
+
 (package private) void
 reset()
 Reset the offset to UNINITIALIZED before before reusing an 
old chunk
 
 
-
+
 http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 toString() 
 
@@ -237,7 +241,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 data
-private byte[] data
+private byte[] data
 Actual underlying data
 
 
@@ -247,7 +251,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 UNINITIALIZED
-private static final int UNINITIALIZED
+private static final int UNINITIALIZED
 See Also:Constant
 Field Values
 
 
@@ -257,7 +261,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 OOM
-private static final int OOM
+private static final int OOM
 See Also:Constant
 Field Values
 
 
@@ -267,7 +271,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 nextFreeOffset
-private http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicInteger nextFreeOffset
+private http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicInteger nextFreeOffset
 Offset for the next allocation, or the sentinel value -1
  which implies that the chunk is still uninitialized.
 
@@ -278,7 +282,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 allocCount
-private http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicInteger allocCount
+private http://docs.oracle.com/javase/7/docs/api/java/util/concurrent/atomic/AtomicInteger.html?is-external=true";
 title="class or interface in java.util.concurrent.atomic">AtomicInteger allocCount
 Total number of allocations satisfied from this buffer
 
 
@@ -288,7 +292,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 size
-private final int size
+private final int size
 Size of chunk in bytes
 
 
@@ -306,7 +310,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 HeapMemStoreLAB.Chunk
-HeapMemStoreLAB.Chunk(int size)
+HeapMemStoreLAB.Chunk(int size)
 Create an uninitialized chunk. Note that memory is not 
allocated yet, so
  this is cheap.
 Parameters:size - in 
bytes
@@ -326,7 +330,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 init
-public void init()
+public void init()
 Actually claim the memory for this chunk. This should only 
be called from
  the thread that constructed the chunk. It is thread-safe against other
  threads calling alloc(), who will block until the allocation is 
complete.
@@ -338,7 +342,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 reset
-void reset()
+void reset()
 Reset the offset to UNINITIALIZED before before reusing an 
old chunk
 
 
@@ -348,7 +352,7 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 alloc
-public int alloc(int size)
+public int alloc(int size)
 Try to allocate size bytes from the 
chunk.
 Returns:the offset of the 
successful allocation, or -1 to indicate not-enough-space
 
@@ -356,16 +360,25 @@ extends http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?
 
 
 
-
+
 
 toString
-public http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String toString()
+public http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String toString()
 
 Overrides

[04/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/procedure/class-use/Procedure.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure/class-use/Procedure.html 
b/devapidocs/org/apache/hadoop/hbase/procedure/class-use/Procedure.html
index ac1bfe6..4d79609 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure/class-use/Procedure.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure/class-use/Procedure.html
@@ -147,18 +147,26 @@
 
 
 void
-ZKProcedureCoordinatorRpcs.resetMembers(Procedure proc)
-Delete znodes that are no longer in use.
+ProcedureCoordinatorRpcs.resetMembers(Procedure procName)
+Notify Members to reset the distributed state for 
procedure
 
 
 
 void
-ProcedureCoordinatorRpcs.resetMembers(Procedure procName)
-Notify Members to reset the distributed state for 
procedure
+ZKProcedureCoordinatorRpcs.resetMembers(Procedure proc)
+Delete znodes that are no longer in use.
 
 
 
 void
+ProcedureCoordinatorRpcs.sendAbortToMembers(Procedure procName,
+ForeignException cause)
+Notify the members that the coordinator has aborted the 
procedure and that it should release
+ barrier resources.
+
+
+
+void
 ZKProcedureCoordinatorRpcs.sendAbortToMembers(Procedure proc,
 ForeignException ee)
 This is the abort message being sent by the coordinator to 
member
@@ -167,43 +175,35 @@
  coordinator.
 
 
-
-void
-ProcedureCoordinatorRpcs.sendAbortToMembers(Procedure procName,
-ForeignException cause)
-Notify the members that the coordinator has aborted the 
procedure and that it should release
- barrier resources.
-
-
 
 void
-ZKProcedureCoordinatorRpcs.sendGlobalBarrierAcquire(Procedure proc,
+ProcedureCoordinatorRpcs.sendGlobalBarrierAcquire(Procedure procName,
 byte[] info,
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString> nodeNames)
-The "acquire" phase.
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString> members)
+Notify the members to acquire barrier for the 
procedure
 
 
 
 void
-ProcedureCoordinatorRpcs.sendGlobalBarrierAcquire(Procedure procName,
+ZKProcedureCoordinatorRpcs.sendGlobalBarrierAcquire(Procedure proc,
 byte[] info,
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString> members)
-Notify the members to acquire barrier for the 
procedure
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString> nodeNames)
+The "acquire" phase.
 
 
 
 void
-ZKProcedureCoordinatorRpcs.sendGlobalBarrierReached(Procedure proc,
-http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString> nodeNames) 
-
-
-void
 ProcedureCoordinatorRpcs.sendGlobalBarrierReached(Procedure procName,
 http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString> members)
 Notify members that all members have acquired their parts 
of the barrier and that they can
  now execute under the global barrier.
 
 
+
+void
+ZKProcedureCoordinatorRpcs.sendGlobalBarrierReached(Procedure proc,
+http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">ListString> nodeNames) 
+
 
 (package private) boolean
 ProcedureCoo

[22/52] [partial] hbase-site git commit: Published site at 2650711e944244b3b87e6d6805b7716b216e8786.

2016-07-12 Thread busbey
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
index 2f4c765..728b572 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Consistency.html
@@ -142,19 +142,19 @@ the order they are declared.
 
 
 
+Query
+Query.setConsistency(Consistency consistency)
+Sets the consistency level for this operation
+
+
+
 Scan
 Scan.setConsistency(Consistency consistency) 
 
-
+
 Get
 Get.setConsistency(Consistency consistency) 
 
-
-Query
-Query.setConsistency(Consistency consistency)
-Sets the consistency level for this operation
-
-
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
index 205e022..ccffcfa 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Delete.html
@@ -327,7 +327,7 @@ service.
 
 
 boolean
-Table.checkAndDelete(byte[] row,
+HTable.checkAndDelete(byte[] row,
 byte[] family,
 byte[] qualifier,
 byte[] value,
@@ -338,7 +338,7 @@ service.
 
 
 boolean
-HTable.checkAndDelete(byte[] row,
+Table.checkAndDelete(byte[] row,
 byte[] family,
 byte[] qualifier,
 byte[] value,
@@ -357,7 +357,7 @@ service.
 
 
 boolean
-Table.checkAndDelete(byte[] row,
+HTable.checkAndDelete(byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
@@ -369,7 +369,7 @@ service.
 
 
 boolean
-HTable.checkAndDelete(byte[] row,
+Table.checkAndDelete(byte[] row,
 byte[] family,
 byte[] qualifier,
 CompareFilter.CompareOp compareOp,
@@ -390,13 +390,13 @@ service.
 
 
 void
-Table.delete(Delete delete)
+HTable.delete(Delete delete)
 Deletes the specified cells/row.
 
 
 
 void
-HTable.delete(Delete delete)
+Table.delete(Delete delete)
 Deletes the specified cells/row.
 
 
@@ -415,13 +415,13 @@ service.
 
 
 void
-Table.delete(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List deletes)
+HTable.delete(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List deletes)
 Deletes the specified cells/rows in bulk.
 
 
 
 void
-HTable.delete(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List deletes)
+Table.delete(http://docs.oracle.com/javase/7/docs/api/java/util/List.html?is-external=true";
 title="class or interface in java.util">List deletes)
 Deletes the specified cells/rows in bulk.
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/27849820/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
index 4f6c12d..fc49675 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Durability.html
@@ -241,7 +241,7 @@ the order they are declared.
 
 
 long
-Table.incrementColumnValue(byte[] row,
+HTable.incrementColumnValue(byte[] row,
 byte[] family,
 byte[] qualifier,
 long amount,
@@ -251,7 +251,7 @@ the order they are declared.
 
 
 long
-HTable.incrementColumnValue(byte[] row,
+Table.incrementColumnValue(byte[] row,
 byte[] family,
 byte[] qualifier,
 long amount,
@@ -272,23 +272,23 @@ the order they are declared.
 Delete.setDurability(Durability d) 
 
 
-Put
-Put.setDurability(Durability d) 
-
-
 Mutation
 Mutation.setDurability(Durability d)
 Set the durability for this mutation
 
 
-
-Append
-Append.setDurability(Durability d) 
-
 
 Increment
 Increment.setDurability(Durability d) 
 
+
+Put
+Put.setDurability(Durability 

[4/6] hbase git commit: HBASE-16207 can't restore snapshot without "Admin" permission

2016-07-12 Thread mbertozzi
HBASE-16207 can't restore snapshot without "Admin" permission


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9b5f19ea
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9b5f19ea
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9b5f19ea

Branch: refs/heads/branch-1.2
Commit: 9b5f19eaebfb6099de9edd1204be7da9fc4c6a34
Parents: b96b042
Author: Matteo Bertozzi 
Authored: Tue Jul 12 05:55:07 2016 -0700
Committer: Matteo Bertozzi 
Committed: Tue Jul 12 06:13:56 2016 -0700

--
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java| 2 +-
 .../java/org/apache/hadoop/hbase/master/MasterRpcServices.java   | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9b5f19ea/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 36a0693..da62710 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -2498,7 +2498,7 @@ public class HMaster extends HRegionServer implements 
MasterServices, Server {
* @throws IOException if the namespace manager is not ready yet.
* @throws NamespaceNotFoundException if the namespace does not exists
*/
-  private void ensureNamespaceExists(final String name)
+  protected void ensureNamespaceExists(final String name)
   throws IOException, NamespaceNotFoundException {
 checkNamespaceManagerReady();
 NamespaceDescriptor nsd = tableNamespaceManager.get(name);

http://git-wip-us.apache.org/repos/asf/hbase/blob/9b5f19ea/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index c540a3e..c1472c9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -1258,9 +1258,9 @@ public class MasterRpcServices extends RSRpcServices
   master.checkInitialized();
   master.snapshotManager.checkSnapshotSupport();
 
-// ensure namespace exists
+  // ensure namespace exists
   TableName dstTable = TableName.valueOf(request.getSnapshot().getTable());
-  master.getNamespaceDescriptor(dstTable.getNamespaceAsString());
+  master.ensureNamespaceExists(dstTable.getNamespaceAsString());
 
   SnapshotDescription reqSnapshot = request.getSnapshot();
   master.snapshotManager.restoreSnapshot(reqSnapshot);



[5/6] hbase git commit: HBASE-16207 can't restore snapshot without "Admin" permission

2016-07-12 Thread mbertozzi
HBASE-16207 can't restore snapshot without "Admin" permission


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/67d05745
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/67d05745
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/67d05745

Branch: refs/heads/branch-1.1
Commit: 67d057451d10ec9725445b1230fbfd932a3745a6
Parents: 79b77e3
Author: Matteo Bertozzi 
Authored: Tue Jul 12 05:55:07 2016 -0700
Committer: Matteo Bertozzi 
Committed: Tue Jul 12 06:18:47 2016 -0700

--
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java| 2 +-
 .../java/org/apache/hadoop/hbase/master/MasterRpcServices.java   | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/67d05745/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 90f5295..ee9c052 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -2421,7 +2421,7 @@ public class HMaster extends HRegionServer implements 
MasterServices, Server {
* @throws IOException if the namespace manager is not ready yet.
* @throws NamespaceNotFoundException if the namespace does not exists
*/
-  private void ensureNamespaceExists(final String name)
+  protected void ensureNamespaceExists(final String name)
   throws IOException, NamespaceNotFoundException {
 checkNamespaceManagerReady();
 NamespaceDescriptor nsd = tableNamespaceManager.get(name);

http://git-wip-us.apache.org/repos/asf/hbase/blob/67d05745/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index e16d250..b983fcd 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -1241,9 +1241,9 @@ public class MasterRpcServices extends RSRpcServices
   master.checkInitialized();
   master.snapshotManager.checkSnapshotSupport();
 
-// ensure namespace exists
+  // ensure namespace exists
   TableName dstTable = TableName.valueOf(request.getSnapshot().getTable());
-  master.getNamespaceDescriptor(dstTable.getNamespaceAsString());
+  master.ensureNamespaceExists(dstTable.getNamespaceAsString());
 
   SnapshotDescription reqSnapshot = request.getSnapshot();
   master.snapshotManager.restoreSnapshot(reqSnapshot);



[6/6] hbase git commit: HBASE-16207 can't restore snapshot without "Admin" permission

2016-07-12 Thread mbertozzi
HBASE-16207 can't restore snapshot without "Admin" permission


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a16fb9ee
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a16fb9ee
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a16fb9ee

Branch: refs/heads/0.98
Commit: a16fb9ee0d6a6762604dc8f95334bc7b6b01ece0
Parents: ad064c7
Author: Matteo Bertozzi 
Authored: Tue Jul 12 06:25:32 2016 -0700
Committer: Matteo Bertozzi 
Committed: Tue Jul 12 06:25:32 2016 -0700

--
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a16fb9ee/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index cbecdb5..9571bcb 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -3273,7 +3273,7 @@ MasterServices, Server {
 // ensure namespace exists
 try {
   TableName dstTable = TableName.valueOf(request.getSnapshot().getTable());
-  getNamespaceDescriptor(dstTable.getNamespaceAsString());
+  ensureNamespaceExists(dstTable.getNamespaceAsString());
 } catch (IOException ioe) {
   throw new ServiceException(ioe);
 }



[3/6] hbase git commit: HBASE-16207 can't restore snapshot without "Admin" permission

2016-07-12 Thread mbertozzi
HBASE-16207 can't restore snapshot without "Admin" permission


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/51eee9a2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/51eee9a2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/51eee9a2

Branch: refs/heads/branch-1.3
Commit: 51eee9a22f1398168bedcb19ab6e2b6a5eb7c4bf
Parents: 0fda2bc
Author: Matteo Bertozzi 
Authored: Tue Jul 12 05:55:07 2016 -0700
Committer: Matteo Bertozzi 
Committed: Tue Jul 12 06:04:56 2016 -0700

--
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java| 2 +-
 .../java/org/apache/hadoop/hbase/master/MasterRpcServices.java   | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/51eee9a2/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 99f1e5b..dd4aa8f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -2593,7 +2593,7 @@ public class HMaster extends HRegionServer implements 
MasterServices, Server {
* @throws IOException if the namespace manager is not ready yet.
* @throws NamespaceNotFoundException if the namespace does not exists
*/
-  private void ensureNamespaceExists(final String name)
+  protected void ensureNamespaceExists(final String name)
   throws IOException, NamespaceNotFoundException {
 checkNamespaceManagerReady();
 NamespaceDescriptor nsd = tableNamespaceManager.get(name);

http://git-wip-us.apache.org/repos/asf/hbase/blob/51eee9a2/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index f391ca3..f59009e 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -1275,9 +1275,9 @@ public class MasterRpcServices extends RSRpcServices
   master.checkInitialized();
   master.snapshotManager.checkSnapshotSupport();
 
-// ensure namespace exists
+  // ensure namespace exists
   TableName dstTable = TableName.valueOf(request.getSnapshot().getTable());
-  master.getNamespaceDescriptor(dstTable.getNamespaceAsString());
+  master.ensureNamespaceExists(dstTable.getNamespaceAsString());
 
   SnapshotDescription reqSnapshot = request.getSnapshot();
   master.snapshotManager.restoreSnapshot(reqSnapshot);



[1/6] hbase git commit: HBASE-16207 can't restore snapshot without "Admin" permission

2016-07-12 Thread mbertozzi
Repository: hbase
Updated Branches:
  refs/heads/0.98 ad064c718 -> a16fb9ee0
  refs/heads/branch-1 7fa311a94 -> 36a48a3cf
  refs/heads/branch-1.1 79b77e354 -> 67d057451
  refs/heads/branch-1.2 b96b04204 -> 9b5f19eae
  refs/heads/branch-1.3 0fda2bc9e -> 51eee9a22
  refs/heads/master 7227c27fe -> 2650711e9


HBASE-16207 can't restore snapshot without "Admin" permission


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2650711e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2650711e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2650711e

Branch: refs/heads/master
Commit: 2650711e944244b3b87e6d6805b7716b216e8786
Parents: 7227c27
Author: Matteo Bertozzi 
Authored: Tue Jul 12 05:50:06 2016 -0700
Committer: Matteo Bertozzi 
Committed: Tue Jul 12 05:50:06 2016 -0700

--
 .../java/org/apache/hadoop/hbase/master/MasterRpcServices.java| 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2650711e/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 2f65e97..ef4688f 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -1189,7 +1189,8 @@ public class MasterRpcServices extends RSRpcServices
 
   // Ensure namespace exists. Will throw exception if non-known NS.
   TableName dstTable = TableName.valueOf(request.getSnapshot().getTable());
-  master.getNamespace(dstTable.getNamespaceAsString());
+  master.getClusterSchema().getNamespace(dstTable.getNamespaceAsString());
+
   SnapshotDescription reqSnapshot = request.getSnapshot();
   long procId = master.snapshotManager.restoreOrCloneSnapshot(
 reqSnapshot, request.getNonceGroup(), request.getNonce());



[2/6] hbase git commit: HBASE-16207 can't restore snapshot without "Admin" permission

2016-07-12 Thread mbertozzi
HBASE-16207 can't restore snapshot without "Admin" permission


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/36a48a3c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/36a48a3c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/36a48a3c

Branch: refs/heads/branch-1
Commit: 36a48a3cf9b6989350a9c1d0ad179c3043f95b43
Parents: 7fa311a
Author: Matteo Bertozzi 
Authored: Tue Jul 12 05:55:07 2016 -0700
Committer: Matteo Bertozzi 
Committed: Tue Jul 12 05:55:07 2016 -0700

--
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java| 2 +-
 .../java/org/apache/hadoop/hbase/master/MasterRpcServices.java   | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/36a48a3c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 3a37429..19ca6e8 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -2596,7 +2596,7 @@ public class HMaster extends HRegionServer implements 
MasterServices, Server {
* @throws IOException if the namespace manager is not ready yet.
* @throws NamespaceNotFoundException if the namespace does not exists
*/
-  private void ensureNamespaceExists(final String name)
+  protected void ensureNamespaceExists(final String name)
   throws IOException, NamespaceNotFoundException {
 checkNamespaceManagerReady();
 NamespaceDescriptor nsd = tableNamespaceManager.get(name);

http://git-wip-us.apache.org/repos/asf/hbase/blob/36a48a3c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 152379d..aee2fbf 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -1277,9 +1277,9 @@ public class MasterRpcServices extends RSRpcServices
   master.checkInitialized();
   master.snapshotManager.checkSnapshotSupport();
 
-// ensure namespace exists
+  // ensure namespace exists
   TableName dstTable = TableName.valueOf(request.getSnapshot().getTable());
-  master.getNamespaceDescriptor(dstTable.getNamespaceAsString());
+  master.ensureNamespaceExists(dstTable.getNamespaceAsString());
 
   SnapshotDescription reqSnapshot = request.getSnapshot();
   master.snapshotManager.restoreSnapshot(reqSnapshot);



hbase git commit: HBASE-16194 Should count in MSLAB chunk allocation into heap size change when adding duplicate cells

2016-07-12 Thread liyu
Repository: hbase
Updated Branches:
  refs/heads/0.98 ecc1a886e -> ad064c718


HBASE-16194 Should count in MSLAB chunk allocation into heap size change when 
adding duplicate cells


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ad064c71
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ad064c71
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ad064c71

Branch: refs/heads/0.98
Commit: ad064c7184ff17792c31318e9c6838cf723f191f
Parents: ecc1a88
Author: Yu Li 
Authored: Tue Jul 12 16:58:23 2016 +0800
Committer: Yu Li 
Committed: Tue Jul 12 16:58:23 2016 +0800

--
 .../hadoop/hbase/regionserver/MemStore.java | 38 +++-
 .../hadoop/hbase/regionserver/MemStoreLAB.java  | 11 ++
 .../hadoop/hbase/regionserver/TestMemStore.java | 19 ++
 3 files changed, 59 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ad064c71/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
index 549f15e..9c63fc5 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStore.java
@@ -47,6 +47,8 @@ import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.cloudera.htrace.Trace;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * The MemStore holds in-memory modifications to the Store.  Modifications
  * are {@link KeyValue}s.  When asked to flush, current memstore is moved
@@ -228,7 +230,8 @@ public class MemStore implements HeapSize {
*/
   long add(final KeyValue kv) {
 KeyValue toAdd = maybeCloneWithAllocator(kv);
-return internalAdd(toAdd);
+boolean mslabUsed = (toAdd != kv);
+return internalAdd(toAdd, mslabUsed);
   }
 
   long timeOfOldestEdit() {
@@ -258,14 +261,34 @@ public class MemStore implements HeapSize {
* allocator, and doesn't take the lock.
*
* Callers should ensure they already have the read lock taken
+   * @param toAdd the kv to add
+   * @param mslabUsed whether MSLAB is used for the kv
+   * @return the heap size change in bytes
*/
-  private long internalAdd(final KeyValue toAdd) {
-long s = heapSizeChange(toAdd, addToKVSet(toAdd));
+  private long internalAdd(final KeyValue toAdd, boolean mslabUsed) {
+boolean notPresent = addToKVSet(toAdd);
+long s = heapSizeChange(toAdd, notPresent);
+// If there's already a same cell in the CellSet and we are using MSLAB, 
we must count in the
+// MSLAB allocation size as well, or else there will be memory leak 
(occupied heap size larger
+// than the counted number)
+if (!notPresent && mslabUsed) {
+  s += getCellLength(toAdd);
+}
 timeRangeTracker.includeTimestamp(toAdd);
 this.size.addAndGet(s);
 return s;
   }
 
+  /**
+   * Get cell length after serialized in {@link KeyValue}
+   * @param cell The cell to get length
+   * @return the serialized cell length
+   */
+  @VisibleForTesting
+  int getCellLength(Cell cell) {
+return KeyValueUtil.length(cell);
+  }
+
   private KeyValue maybeCloneWithAllocator(KeyValue kv) {
 if (allocator == null) {
   return kv;
@@ -320,12 +343,9 @@ public class MemStore implements HeapSize {
* @return approximate size of the passed key and value.
*/
   long delete(final KeyValue delete) {
-long s = 0;
 KeyValue toAdd = maybeCloneWithAllocator(delete);
-s += heapSizeChange(toAdd, addToKVSet(toAdd));
-timeRangeTracker.includeTimestamp(toAdd);
-this.size.addAndGet(s);
-return s;
+boolean mslabUsed = (toAdd != delete);
+return internalAdd(toAdd, mslabUsed);
   }
 
   /**
@@ -564,7 +584,7 @@ public class MemStore implements HeapSize {
 // test that triggers the pathological case if we don't avoid MSLAB
 // here.
 KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
-long addedSize = internalAdd(kv);
+long addedSize = internalAdd(kv, false);
 
 // Get the KeyValues for the row/family/qualifier regardless of timestamp.
 // For this case we want to clean up any other puts

http://git-wip-us.apache.org/repos/asf/hbase/blob/ad064c71/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLAB.java
index d4e96

hbase git commit: HBASE-16184 Shell test fails due to rLoadSink being nil (Phil Yang)

2016-07-12 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master ccf293d7f -> 7227c27fe


HBASE-16184 Shell test fails due to rLoadSink being nil (Phil Yang)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7227c27f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7227c27f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7227c27f

Branch: refs/heads/master
Commit: 7227c27fe84fe9ca8f6ed1db7151fb96bd58de46
Parents: ccf293d
Author: tedyu 
Authored: Tue Jul 12 00:15:26 2016 -0700
Committer: tedyu 
Committed: Tue Jul 12 00:15:26 2016 -0700

--
 hbase-shell/src/main/ruby/hbase/admin.rb | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7227c27f/hbase-shell/src/main/ruby/hbase/admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb 
b/hbase-shell/src/main/ruby/hbase/admin.rb
index 5f04d1d..c6e6f48 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -722,6 +722,7 @@ module Hbase
   rSinkString   = "   SINK  :"
   rSourceString = "   SOURCE:"
   rLoadSink = sl.getReplicationLoadSink()
+  next if rLoadSink == nil
   rSinkString << " AgeOfLastAppliedOp=" + 
rLoadSink.getAgeOfLastAppliedOp().to_s
   rSinkString << ", TimeStampsOfLastAppliedOp=" +
   
(java.util.Date.new(rLoadSink.getTimeStampsOfLastAppliedOp())).toString()