http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c5e961f/devapidocs/org/apache/hadoop/hbase/util/IdReadWriteLock.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/IdReadWriteLock.html 
b/devapidocs/org/apache/hadoop/hbase/util/IdReadWriteLock.html
index ae41cca..ac816a8 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/IdReadWriteLock.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/IdReadWriteLock.html
@@ -93,14 +93,14 @@ var activeTableTab = "activeTableTab";
 <!-- ======== START OF CLASS DATA ======== -->
 <div class="header">
 <div class="subTitle">org.apache.hadoop.hbase.util</div>
-<h2 title="Class IdReadWriteLock" class="title">Class IdReadWriteLock</h2>
+<h2 title="Class IdReadWriteLock" class="title">Class 
IdReadWriteLock&lt;T&gt;</h2>
 </div>
 <div class="contentContainer">
 <ul class="inheritance">
 <li><a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">java.lang.Object</a></li>
 <li>
 <ul class="inheritance">
-<li>org.apache.hadoop.hbase.util.IdReadWriteLock</li>
+<li>org.apache.hadoop.hbase.util.IdReadWriteLock&lt;T&gt;</li>
 </ul>
 </li>
 </ul>
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 <hr>
 <br>
 <pre>@InterfaceAudience.Private
-public class <a 
href="../../../../../src-html/org/apache/hadoop/hbase/util/IdReadWriteLock.html#line.45">IdReadWriteLock</a>
+public class <a 
href="../../../../../src-html/org/apache/hadoop/hbase/util/IdReadWriteLock.html#line.45">IdReadWriteLock</a>&lt;T&gt;
 extends <a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object</a></pre>
 <div class="block">Allows multiple concurrent clients to lock on a numeric id 
with ReentrantReadWriteLock. The
  intended usage for read lock is as follows:
@@ -164,7 +164,7 @@ extends <a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <th class="colLast" scope="col">Field and Description</th>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><code>private <a 
href="../../../../../org/apache/hadoop/hbase/util/ObjectPool.html" title="class 
in org.apache.hadoop.hbase.util">ObjectPool</a>&lt;<a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long</a>,<a 
href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true";
 title="class or interface in 
java.util.concurrent.locks">ReentrantReadWriteLock</a>&gt;</code></td>
+<td class="colFirst"><code>private <a 
href="../../../../../org/apache/hadoop/hbase/util/ObjectPool.html" title="class 
in org.apache.hadoop.hbase.util">ObjectPool</a>&lt;<a 
href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="type parameter in IdReadWriteLock">T</a>,<a 
href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true";
 title="class or interface in 
java.util.concurrent.locks">ReentrantReadWriteLock</a>&gt;</code></td>
 <td class="colLast"><code><span class="memberNameLink"><a 
href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html#lockPool">lockPool</a></span></code>
 <div class="block">The pool to get entry from, entries are mapped by <a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/ref/Reference.html?is-external=true";
 title="class or interface in java.lang.ref"><code>Reference</code></a> and 
will be automatically
  garbage-collected by JVM</div>
@@ -221,7 +221,7 @@ extends <a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </tr>
 <tr id="i1" class="rowColor">
 <td class="colFirst"><code><a 
href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true";
 title="class or interface in 
java.util.concurrent.locks">ReentrantReadWriteLock</a></code></td>
-<td class="colLast"><code><span class="memberNameLink"><a 
href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html#getLock-long-">getLock</a></span>(long&nbsp;id)</code>
+<td class="colLast"><code><span class="memberNameLink"><a 
href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html#getLock-T-">getLock</a></span>(<a
 href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="type parameter in IdReadWriteLock">T</a>&nbsp;id)</code>
 <div class="block">Get the ReentrantReadWriteLock corresponding to the given 
id</div>
 </td>
 </tr>
@@ -237,7 +237,7 @@ extends <a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 </tr>
 <tr id="i4" class="altColor">
 <td class="colFirst"><code>void</code></td>
-<td class="colLast"><code><span class="memberNameLink"><a 
href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html#waitForWaiters-long-int-">waitForWaiters</a></span>(long&nbsp;id,
+<td class="colLast"><code><span class="memberNameLink"><a 
href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html#waitForWaiters-T-int-">waitForWaiters</a></span>(<a
 href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="type parameter in IdReadWriteLock">T</a>&nbsp;id,
               int&nbsp;numWaiters)</code>&nbsp;</td>
 </tr>
 </table>
@@ -281,7 +281,7 @@ extends <a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <ul class="blockList">
 <li class="blockList">
 <h4>lockPool</h4>
-<pre>private final&nbsp;<a 
href="../../../../../org/apache/hadoop/hbase/util/ObjectPool.html" title="class 
in org.apache.hadoop.hbase.util">ObjectPool</a>&lt;<a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long</a>,<a 
href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true";
 title="class or interface in 
java.util.concurrent.locks">ReentrantReadWriteLock</a>&gt; <a 
href="../../../../../src-html/org/apache/hadoop/hbase/util/IdReadWriteLock.html#line.52">lockPool</a></pre>
+<pre>private final&nbsp;<a 
href="../../../../../org/apache/hadoop/hbase/util/ObjectPool.html" title="class 
in org.apache.hadoop.hbase.util">ObjectPool</a>&lt;<a 
href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="type parameter in IdReadWriteLock">T</a>,<a 
href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true";
 title="class or interface in 
java.util.concurrent.locks">ReentrantReadWriteLock</a>&gt; <a 
href="../../../../../src-html/org/apache/hadoop/hbase/util/IdReadWriteLock.html#line.52">lockPool</a></pre>
 <div class="block">The pool to get entry from, entries are mapped by <a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/ref/Reference.html?is-external=true";
 title="class or interface in java.lang.ref"><code>Reference</code></a> and 
will be automatically
  garbage-collected by JVM</div>
 </li>
@@ -336,13 +336,15 @@ extends <a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <!--   -->
 </a>
 <h3>Method Detail</h3>
-<a name="getLock-long-">
+<a name="getLock-java.lang.Object-">
+<!--   -->
+</a><a name="getLock-T-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>getLock</h4>
-<pre>public&nbsp;<a 
href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true";
 title="class or interface in 
java.util.concurrent.locks">ReentrantReadWriteLock</a>&nbsp;<a 
href="../../../../../src-html/org/apache/hadoop/hbase/util/IdReadWriteLock.html#line.95">getLock</a>(long&nbsp;id)</pre>
+<pre>public&nbsp;<a 
href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true";
 title="class or interface in 
java.util.concurrent.locks">ReentrantReadWriteLock</a>&nbsp;<a 
href="../../../../../src-html/org/apache/hadoop/hbase/util/IdReadWriteLock.html#line.95">getLock</a>(<a
 href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="type parameter in IdReadWriteLock">T</a>&nbsp;id)</pre>
 <div class="block">Get the ReentrantReadWriteLock corresponding to the given 
id</div>
 <dl>
 <dt><span class="paramLabel">Parameters:</span></dt>
@@ -369,13 +371,15 @@ extends <a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 <pre>private&nbsp;void&nbsp;<a 
href="../../../../../src-html/org/apache/hadoop/hbase/util/IdReadWriteLock.html#line.111">gc</a>()</pre>
 </li>
 </ul>
-<a name="waitForWaiters-long-int-">
+<a name="waitForWaiters-java.lang.Object-int-">
+<!--   -->
+</a><a name="waitForWaiters-T-int-">
 <!--   -->
 </a>
 <ul class="blockList">
 <li class="blockList">
 <h4>waitForWaiters</h4>
-<pre>public&nbsp;void&nbsp;<a 
href="../../../../../src-html/org/apache/hadoop/hbase/util/IdReadWriteLock.html#line.116">waitForWaiters</a>(long&nbsp;id,
+<pre>public&nbsp;void&nbsp;<a 
href="../../../../../src-html/org/apache/hadoop/hbase/util/IdReadWriteLock.html#line.116">waitForWaiters</a>(<a
 href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="type parameter in IdReadWriteLock">T</a>&nbsp;id,
                            int&nbsp;numWaiters)
                     throws <a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true";
 title="class or interface in java.lang">InterruptedException</a></pre>
 <dl>
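The regenerated page above documents IdReadWriteLock becoming generic over its id type: getLock(T id) replaces getLock(long id) and still returns a java.util.concurrent.locks.ReentrantReadWriteLock, with the backing ObjectPool now keyed by T. A minimal sketch of the read-lock pattern the class Javadoc alludes to, using only the signatures shown in this diff; the no-arg constructor, the Long key choice, and the field/method names are assumptions for illustration:

import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.hadoop.hbase.util.IdReadWriteLock;

public class IdReadWriteLockReadSketch {
  // Assumed no-arg constructor; the diff above only shows the generic signatures.
  private final IdReadWriteLock<Long> offsetLock = new IdReadWriteLock<>();

  public void withReadLock(long blockOffset, Runnable readAction) {
    // getLock(T id) replaces getLock(long id); the long key is autoboxed to Long here.
    ReentrantReadWriteLock lock = offsetLock.getLock(blockOffset);
    lock.readLock().lock();
    try {
      readAction.run();
    } finally {
      lock.readLock().unlock();
    }
  }
}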

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c5e961f/devapidocs/org/apache/hadoop/hbase/util/class-use/IdReadWriteLock.html
----------------------------------------------------------------------
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/class-use/IdReadWriteLock.html 
b/devapidocs/org/apache/hadoop/hbase/util/class-use/IdReadWriteLock.html
index 9448828..849c7c9 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/class-use/IdReadWriteLock.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/IdReadWriteLock.html
@@ -89,6 +89,10 @@
  <a href="../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" 
title="interface in 
org.apache.hadoop.hbase.io.hfile"><code>BlockCache</code></a>.</div>
 </td>
 </tr>
+<tr class="rowColor">
+<td class="colFirst"><a 
href="#org.apache.hadoop.hbase.master">org.apache.hadoop.hbase.master</a></td>
+<td class="colLast">&nbsp;</td>
+</tr>
 </tbody>
 </table>
 </li>
@@ -106,7 +110,7 @@
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code>(package private) <a 
href="../../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="class in org.apache.hadoop.hbase.util">IdReadWriteLock</a></code></td>
+<td class="colFirst"><code>(package private) <a 
href="../../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="class in org.apache.hadoop.hbase.util">IdReadWriteLock</a>&lt;<a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long</a>&gt;</code></td>
 <td class="colLast"><span class="typeNameLabel">BucketCache.</span><code><span 
class="memberNameLink"><a 
href="../../../../../../org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html#offsetLock">offsetLock</a></span></code>
 <div class="block">A ReentrantReadWriteLock to lock on a particular block 
identified by offset.</div>
 </td>
@@ -114,6 +118,24 @@
 </tbody>
 </table>
 </li>
+<li class="blockList"><a name="org.apache.hadoop.hbase.master">
+<!--   -->
+</a>
+<h3>Uses of <a 
href="../../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="class in org.apache.hadoop.hbase.util">IdReadWriteLock</a> in <a 
href="../../../../../../org/apache/hadoop/hbase/master/package-summary.html">org.apache.hadoop.hbase.master</a></h3>
+<table class="useSummary" border="0" cellpadding="3" cellspacing="0" 
summary="Use table, listing fields, and an explanation">
+<caption><span>Fields in <a 
href="../../../../../../org/apache/hadoop/hbase/master/package-summary.html">org.apache.hadoop.hbase.master</a>
 declared as <a 
href="../../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="class in org.apache.hadoop.hbase.util">IdReadWriteLock</a></span><span 
class="tabEnd">&nbsp;</span></caption>
+<tr>
+<th class="colFirst" scope="col">Modifier and Type</th>
+<th class="colLast" scope="col">Field and Description</th>
+</tr>
+<tbody>
+<tr class="altColor">
+<td class="colFirst"><code>private <a 
href="../../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="class in org.apache.hadoop.hbase.util">IdReadWriteLock</a>&lt;<a 
href="../../../../../../org/apache/hadoop/hbase/TableName.html" title="class in 
org.apache.hadoop.hbase">TableName</a>&gt;</code></td>
+<td class="colLast"><span 
class="typeNameLabel">TableStateManager.</span><code><span 
class="memberNameLink"><a 
href="../../../../../../org/apache/hadoop/hbase/master/TableStateManager.html#tnLock">tnLock</a></span></code>&nbsp;</td>
+</tr>
+</tbody>
+</table>
+</li>
 </ul>
 </li>
 </ul>
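The new org.apache.hadoop.hbase.master row above records the first non-Long parameterization: TableStateManager.tnLock is declared as IdReadWriteLock&lt;TableName&gt;, alongside the existing IdReadWriteLock&lt;Long&gt; BucketCache.offsetLock. A hedged sketch of that per-table write-lock shape, with the class name, method, and mutation body invented for illustration:

import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.IdReadWriteLock;

class PerTableLockSketch {
  // Same shape as the field listed above: an IdReadWriteLock keyed by TableName.
  private final IdReadWriteLock<TableName> tnLock = new IdReadWriteLock<>();

  void updateTableState(TableName table, Runnable mutation) {
    ReentrantReadWriteLock lock = tnLock.getLock(table);
    lock.writeLock().lock();
    try {
      mutation.run(); // hypothetical per-table update guarded by the write lock
    } finally {
      lock.writeLock().unlock();
    }
  }
}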

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c5e961f/devapidocs/org/apache/hadoop/hbase/util/class-use/ObjectPool.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/class-use/ObjectPool.html 
b/devapidocs/org/apache/hadoop/hbase/util/class-use/ObjectPool.html
index 89ae97a..2cd86cc 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/class-use/ObjectPool.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/ObjectPool.html
@@ -124,7 +124,7 @@
 </tr>
 <tbody>
 <tr class="altColor">
-<td class="colFirst"><code>private <a 
href="../../../../../../org/apache/hadoop/hbase/util/ObjectPool.html" 
title="class in org.apache.hadoop.hbase.util">ObjectPool</a>&lt;<a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true";
 title="class or interface in java.lang">Long</a>,<a 
href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true";
 title="class or interface in 
java.util.concurrent.locks">ReentrantReadWriteLock</a>&gt;</code></td>
+<td class="colFirst"><code>private <a 
href="../../../../../../org/apache/hadoop/hbase/util/ObjectPool.html" 
title="class in org.apache.hadoop.hbase.util">ObjectPool</a>&lt;<a 
href="../../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="type parameter in IdReadWriteLock">T</a>,<a 
href="https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/ReentrantReadWriteLock.html?is-external=true";
 title="class or interface in 
java.util.concurrent.locks">ReentrantReadWriteLock</a>&gt;</code></td>
 <td class="colLast"><span 
class="typeNameLabel">IdReadWriteLock.</span><code><span 
class="memberNameLink"><a 
href="../../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html#lockPool">lockPool</a></span></code>
 <div class="block">The pool to get entry from, entries are mapped by <a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/ref/Reference.html?is-external=true";
 title="class or interface in java.lang.ref"><code>Reference</code></a> and 
will be automatically
  garbage-collected by JVM</div>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c5e961f/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-summary.html 
b/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
index 051c486..9f425bd 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-summary.html
@@ -789,7 +789,7 @@
 </td>
 </tr>
 <tr class="altColor">
-<td class="colFirst"><a 
href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="class in org.apache.hadoop.hbase.util">IdReadWriteLock</a></td>
+<td class="colFirst"><a 
href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="class in org.apache.hadoop.hbase.util">IdReadWriteLock</a>&lt;T&gt;</td>
 <td class="colLast">
 <div class="block">Allows multiple concurrent clients to lock on a numeric id 
with ReentrantReadWriteLock.</div>
 </td>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c5e961f/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
index 76b0985..e8132f1 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-tree.html
@@ -290,7 +290,7 @@
 </li>
 <li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/IdLock.html" title="class in 
org.apache.hadoop.hbase.util"><span class="typeNameLink">IdLock</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/IdLock.Entry.html" 
title="class in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">IdLock.Entry</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="class in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">IdReadWriteLock</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.html" 
title="class in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">IdReadWriteLock</span></a>&lt;T&gt;</li>
 <li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/ImmutableByteArray.html" 
title="class in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">ImmutableByteArray</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/IncrementingEnvironmentEdge.html"
 title="class in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">IncrementingEnvironmentEdge</span></a> (implements 
org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/EnvironmentEdge.html" 
title="interface in org.apache.hadoop.hbase.util">EnvironmentEdge</a>)</li>
 <li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/JRubyFormat.html" 
title="class in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">JRubyFormat</span></a></li>
@@ -515,14 +515,14 @@
 <ul>
 <li type="circle">java.lang.<a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang"><span 
class="typeNameLink">Enum</span></a>&lt;E&gt; (implements java.lang.<a 
href="https://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable</a>&lt;T&gt;, java.io.<a 
href="https://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable</a>)
 <ul>
+<li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" 
title="enum in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">ChecksumType</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/PrettyPrinter.Unit.html" 
title="enum in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">PrettyPrinter.Unit</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html"
 title="enum in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a>
 (implements org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" 
title="interface in 
org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
-<li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.ReferenceType.html"
 title="enum in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">IdReadWriteLock.ReferenceType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html"
 title="enum in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">HBaseFsck.ErrorReporter.ERROR_CODE</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/PoolMap.PoolType.html" 
title="enum in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">PoolMap.PoolType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/ChecksumType.html" 
title="enum in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">ChecksumType</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html"
 title="enum in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a>
 (implements org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" 
title="interface in 
org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/HBaseFsck.ErrorReporter.ERROR_CODE.html"
 title="enum in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">HBaseFsck.ErrorReporter.ERROR_CODE</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/Order.html" title="enum in 
org.apache.hadoop.hbase.util"><span class="typeNameLink">Order</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.UnsafeComparer.html"
 title="enum in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">Bytes.LexicographicalComparerHolder.UnsafeComparer</span></a>
 (implements org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" 
title="interface in 
org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/Bytes.LexicographicalComparerHolder.PureJavaComparer.html"
 title="enum in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">Bytes.LexicographicalComparerHolder.PureJavaComparer</span></a>
 (implements org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/Bytes.Comparer.html" 
title="interface in 
org.apache.hadoop.hbase.util">Bytes.Comparer</a>&lt;T&gt;)</li>
+<li type="circle">org.apache.hadoop.hbase.util.<a 
href="../../../../../org/apache/hadoop/hbase/util/IdReadWriteLock.ReferenceType.html"
 title="enum in org.apache.hadoop.hbase.util"><span 
class="typeNameLink">IdReadWriteLock.ReferenceType</span></a></li>
 </ul>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c5e961f/devapidocs/org/apache/hadoop/hbase/util/package-use.html
----------------------------------------------------------------------
diff --git a/devapidocs/org/apache/hadoop/hbase/util/package-use.html 
b/devapidocs/org/apache/hadoop/hbase/util/package-use.html
index a2f0b4e..35ef48d 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/package-use.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/package-use.html
@@ -921,26 +921,31 @@ service.</div>
 </td>
 </tr>
 <tr class="altColor">
+<td class="colOne"><a 
href="../../../../../org/apache/hadoop/hbase/util/class-use/IdReadWriteLock.html#org.apache.hadoop.hbase.master">IdReadWriteLock</a>
+<div class="block">Allows multiple concurrent clients to lock on a numeric id 
with ReentrantReadWriteLock.</div>
+</td>
+</tr>
+<tr class="rowColor">
 <td class="colOne"><a 
href="../../../../../org/apache/hadoop/hbase/util/class-use/JVMClusterUtil.RegionServerThread.html#org.apache.hadoop.hbase.master">JVMClusterUtil.RegionServerThread</a>
 <div class="block">Datastructure to hold RegionServer Thread and RegionServer 
instance</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a 
href="../../../../../org/apache/hadoop/hbase/util/class-use/NonceKey.html#org.apache.hadoop.hbase.master">NonceKey</a>
 <div class="block">This implementation is not smart and just treats nonce 
group and nonce as random bits.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a 
href="../../../../../org/apache/hadoop/hbase/util/class-use/Pair.html#org.apache.hadoop.hbase.master">Pair</a>
 <div class="block">A generic class for pairs.</div>
 </td>
 </tr>
-<tr class="rowColor">
+<tr class="altColor">
 <td class="colOne"><a 
href="../../../../../org/apache/hadoop/hbase/util/class-use/ServerCommandLine.html#org.apache.hadoop.hbase.master">ServerCommandLine</a>
 <div class="block">Base class for command lines that start up various HBase 
daemons.</div>
 </td>
 </tr>
-<tr class="altColor">
+<tr class="rowColor">
 <td class="colOne"><a 
href="../../../../../org/apache/hadoop/hbase/util/class-use/Triple.html#org.apache.hadoop.hbase.master">Triple</a>
 <div class="block">Utility class to manage a triple.</div>
 </td>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c5e961f/devapidocs/overview-tree.html
----------------------------------------------------------------------
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index 584ce0f..7ce37bc 100644
--- a/devapidocs/overview-tree.html
+++ b/devapidocs/overview-tree.html
@@ -1981,7 +1981,7 @@
 <li type="circle">org.apache.hadoop.hbase.http.<a 
href="org/apache/hadoop/hbase/http/HttpServerUtil.html" title="class in 
org.apache.hadoop.hbase.http"><span 
class="typeNameLink">HttpServerUtil</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a 
href="org/apache/hadoop/hbase/util/IdLock.html" title="class in 
org.apache.hadoop.hbase.util"><span class="typeNameLink">IdLock</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.util.<a 
href="org/apache/hadoop/hbase/util/IdLock.Entry.html" title="class in 
org.apache.hadoop.hbase.util"><span 
class="typeNameLink">IdLock.Entry</span></a></li>
-<li type="circle">org.apache.hadoop.hbase.util.<a 
href="org/apache/hadoop/hbase/util/IdReadWriteLock.html" title="class in 
org.apache.hadoop.hbase.util"><span 
class="typeNameLink">IdReadWriteLock</span></a></li>
+<li type="circle">org.apache.hadoop.hbase.util.<a 
href="org/apache/hadoop/hbase/util/IdReadWriteLock.html" title="class in 
org.apache.hadoop.hbase.util"><span 
class="typeNameLink">IdReadWriteLock</span></a>&lt;T&gt;</li>
 <li type="circle">org.apache.hadoop.hbase.util.<a 
href="org/apache/hadoop/hbase/util/ImmutableByteArray.html" title="class in 
org.apache.hadoop.hbase.util"><span 
class="typeNameLink">ImmutableByteArray</span></a></li>
 <li type="circle">org.apache.hadoop.hbase.io.<a 
href="org/apache/hadoop/hbase/io/ImmutableBytesWritable.html" title="class in 
org.apache.hadoop.hbase.io"><span 
class="typeNameLink">ImmutableBytesWritable</span></a> (implements 
org.apache.hadoop.io.WritableComparable&lt;T&gt;)</li>
 <li type="circle">org.apache.hadoop.hbase.regionserver.<a 
href="org/apache/hadoop/hbase/regionserver/ImmutableMemStoreLAB.html" 
title="class in org.apache.hadoop.hbase.regionserver"><span 
class="typeNameLink">ImmutableMemStoreLAB</span></a> (implements 
org.apache.hadoop.hbase.regionserver.<a 
href="org/apache/hadoop/hbase/regionserver/MemStoreLAB.html" title="interface 
in org.apache.hadoop.hbase.regionserver">MemStoreLAB</a>)</li>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c5e961f/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
----------------------------------------------------------------------
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index 8b19b05..f61da78 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
@@ -16,11 +16,11 @@
 <span class="sourceLineNo">008</span>@InterfaceAudience.Private<a 
name="line.8"></a>
 <span class="sourceLineNo">009</span>public class Version {<a 
name="line.9"></a>
 <span class="sourceLineNo">010</span>  public static final String version = 
"3.0.0-SNAPSHOT";<a name="line.10"></a>
-<span class="sourceLineNo">011</span>  public static final String revision = 
"c6ff1de7e2cc08c40785780a4acd65097c8281d9";<a name="line.11"></a>
+<span class="sourceLineNo">011</span>  public static final String revision = 
"397388316ead020d005a33e233364d166d4add00";<a name="line.11"></a>
 <span class="sourceLineNo">012</span>  public static final String user = 
"jenkins";<a name="line.12"></a>
-<span class="sourceLineNo">013</span>  public static final String date = "Thu 
Aug  9 14:39:39 UTC 2018";<a name="line.13"></a>
+<span class="sourceLineNo">013</span>  public static final String date = "Fri 
Aug 10 14:39:22 UTC 2018";<a name="line.13"></a>
 <span class="sourceLineNo">014</span>  public static final String url = 
"git://jenkins-websites1.apache.org/home/jenkins/jenkins-slave/workspace/hbase_generate_website/hbase";<a
 name="line.14"></a>
-<span class="sourceLineNo">015</span>  public static final String srcChecksum 
= "263c29fbcd74c80b3555c88a046ecb4c";<a name="line.15"></a>
+<span class="sourceLineNo">015</span>  public static final String srcChecksum 
= "6c3f408e84717bc608f6c4ea62a7de50";<a name="line.15"></a>
 <span class="sourceLineNo">016</span>}<a name="line.16"></a>
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c5e961f/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.html
----------------------------------------------------------------------
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.html
index 6f3be84..76ab19e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/encoding/EncodedDataBlock.html
@@ -31,257 +31,290 @@
 <span class="sourceLineNo">023</span>import java.io.IOException;<a 
name="line.23"></a>
 <span class="sourceLineNo">024</span>import java.io.OutputStream;<a 
name="line.24"></a>
 <span class="sourceLineNo">025</span>import java.nio.ByteBuffer;<a 
name="line.25"></a>
-<span class="sourceLineNo">026</span>import java.util.Iterator;<a 
name="line.26"></a>
-<span class="sourceLineNo">027</span><a name="line.27"></a>
-<span class="sourceLineNo">028</span>import 
org.apache.commons.lang3.NotImplementedException;<a name="line.28"></a>
-<span class="sourceLineNo">029</span>import org.apache.hadoop.hbase.Cell;<a 
name="line.29"></a>
-<span class="sourceLineNo">030</span>import 
org.apache.hadoop.hbase.HConstants;<a name="line.30"></a>
-<span class="sourceLineNo">031</span>import 
org.apache.hadoop.hbase.KeyValue;<a name="line.31"></a>
-<span class="sourceLineNo">032</span>import 
org.apache.hadoop.hbase.io.compress.Compression.Algorithm;<a name="line.32"></a>
-<span class="sourceLineNo">033</span>import 
org.apache.hadoop.hbase.io.hfile.HFileContext;<a name="line.33"></a>
-<span class="sourceLineNo">034</span>import 
org.apache.hadoop.hbase.util.ByteBufferUtils;<a name="line.34"></a>
-<span class="sourceLineNo">035</span>import 
org.apache.hadoop.hbase.util.Bytes;<a name="line.35"></a>
-<span class="sourceLineNo">036</span>import org.apache.hadoop.io.IOUtils;<a 
name="line.36"></a>
-<span class="sourceLineNo">037</span>import 
org.apache.hadoop.io.compress.Compressor;<a name="line.37"></a>
-<span class="sourceLineNo">038</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.38"></a>
-<span class="sourceLineNo">039</span><a name="line.39"></a>
-<span class="sourceLineNo">040</span>import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a 
name="line.40"></a>
-<span class="sourceLineNo">041</span>import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a 
name="line.41"></a>
-<span class="sourceLineNo">042</span><a name="line.42"></a>
-<span class="sourceLineNo">043</span>/**<a name="line.43"></a>
-<span class="sourceLineNo">044</span> * Encapsulates a data block compressed 
using a particular encoding algorithm.<a name="line.44"></a>
-<span class="sourceLineNo">045</span> * Useful for testing and benchmarking.<a 
name="line.45"></a>
-<span class="sourceLineNo">046</span> * This is used only in testing.<a 
name="line.46"></a>
-<span class="sourceLineNo">047</span> */<a name="line.47"></a>
-<span class="sourceLineNo">048</span>@InterfaceAudience.Private<a 
name="line.48"></a>
-<span class="sourceLineNo">049</span>@VisibleForTesting<a name="line.49"></a>
-<span class="sourceLineNo">050</span>public class EncodedDataBlock {<a 
name="line.50"></a>
-<span class="sourceLineNo">051</span>  private byte[] rawKVs;<a 
name="line.51"></a>
-<span class="sourceLineNo">052</span>  private ByteBuffer rawBuffer;<a 
name="line.52"></a>
-<span class="sourceLineNo">053</span>  private DataBlockEncoder 
dataBlockEncoder;<a name="line.53"></a>
-<span class="sourceLineNo">054</span><a name="line.54"></a>
-<span class="sourceLineNo">055</span>  private byte[] cachedEncodedData;<a 
name="line.55"></a>
+<span class="sourceLineNo">026</span>import java.util.ArrayList;<a 
name="line.26"></a>
+<span class="sourceLineNo">027</span>import java.util.Iterator;<a 
name="line.27"></a>
+<span class="sourceLineNo">028</span>import java.util.List;<a 
name="line.28"></a>
+<span class="sourceLineNo">029</span><a name="line.29"></a>
+<span class="sourceLineNo">030</span>import 
org.apache.commons.lang3.NotImplementedException;<a name="line.30"></a>
+<span class="sourceLineNo">031</span>import org.apache.hadoop.hbase.Cell;<a 
name="line.31"></a>
+<span class="sourceLineNo">032</span>import 
org.apache.hadoop.hbase.HConstants;<a name="line.32"></a>
+<span class="sourceLineNo">033</span>import 
org.apache.hadoop.hbase.KeyValue;<a name="line.33"></a>
+<span class="sourceLineNo">034</span>import 
org.apache.hadoop.hbase.io.compress.Compression.Algorithm;<a name="line.34"></a>
+<span class="sourceLineNo">035</span>import 
org.apache.hadoop.hbase.io.hfile.HFileContext;<a name="line.35"></a>
+<span class="sourceLineNo">036</span>import 
org.apache.hadoop.hbase.util.ByteBufferUtils;<a name="line.36"></a>
+<span class="sourceLineNo">037</span>import 
org.apache.hadoop.hbase.util.Bytes;<a name="line.37"></a>
+<span class="sourceLineNo">038</span>import org.apache.hadoop.io.IOUtils;<a 
name="line.38"></a>
+<span class="sourceLineNo">039</span>import 
org.apache.hadoop.io.compress.Compressor;<a name="line.39"></a>
+<span class="sourceLineNo">040</span>import 
org.apache.yetus.audience.InterfaceAudience;<a name="line.40"></a>
+<span class="sourceLineNo">041</span><a name="line.41"></a>
+<span class="sourceLineNo">042</span>import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;<a 
name="line.42"></a>
+<span class="sourceLineNo">043</span>import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;<a 
name="line.43"></a>
+<span class="sourceLineNo">044</span><a name="line.44"></a>
+<span class="sourceLineNo">045</span>/**<a name="line.45"></a>
+<span class="sourceLineNo">046</span> * Encapsulates a data block compressed 
using a particular encoding algorithm.<a name="line.46"></a>
+<span class="sourceLineNo">047</span> * Useful for testing and benchmarking.<a 
name="line.47"></a>
+<span class="sourceLineNo">048</span> * This is used only in testing.<a 
name="line.48"></a>
+<span class="sourceLineNo">049</span> */<a name="line.49"></a>
+<span class="sourceLineNo">050</span>@InterfaceAudience.Private<a 
name="line.50"></a>
+<span class="sourceLineNo">051</span>@VisibleForTesting<a name="line.51"></a>
+<span class="sourceLineNo">052</span>public class EncodedDataBlock {<a 
name="line.52"></a>
+<span class="sourceLineNo">053</span>  private byte[] rawKVs;<a 
name="line.53"></a>
+<span class="sourceLineNo">054</span>  private ByteBuffer rawBuffer;<a 
name="line.54"></a>
+<span class="sourceLineNo">055</span>  private DataBlockEncoder 
dataBlockEncoder;<a name="line.55"></a>
 <span class="sourceLineNo">056</span><a name="line.56"></a>
-<span class="sourceLineNo">057</span>  private final HFileBlockEncodingContext 
encodingCtx;<a name="line.57"></a>
-<span class="sourceLineNo">058</span>  private HFileContext meta;<a 
name="line.58"></a>
-<span class="sourceLineNo">059</span><a name="line.59"></a>
-<span class="sourceLineNo">060</span>  /**<a name="line.60"></a>
-<span class="sourceLineNo">061</span>   * Create a buffer which will be 
encoded using dataBlockEncoder.<a name="line.61"></a>
-<span class="sourceLineNo">062</span>   * @param dataBlockEncoder Algorithm 
used for compression.<a name="line.62"></a>
-<span class="sourceLineNo">063</span>   * @param encoding encoding type used<a 
name="line.63"></a>
-<span class="sourceLineNo">064</span>   * @param rawKVs<a name="line.64"></a>
-<span class="sourceLineNo">065</span>   * @param meta<a name="line.65"></a>
-<span class="sourceLineNo">066</span>   */<a name="line.66"></a>
-<span class="sourceLineNo">067</span>  public 
EncodedDataBlock(DataBlockEncoder dataBlockEncoder, DataBlockEncoding 
encoding,<a name="line.67"></a>
-<span class="sourceLineNo">068</span>      byte[] rawKVs, HFileContext meta) 
{<a name="line.68"></a>
-<span class="sourceLineNo">069</span>    
Preconditions.checkNotNull(encoding,<a name="line.69"></a>
-<span class="sourceLineNo">070</span>        "Cannot create encoded data block 
with null encoder");<a name="line.70"></a>
-<span class="sourceLineNo">071</span>    this.dataBlockEncoder = 
dataBlockEncoder;<a name="line.71"></a>
-<span class="sourceLineNo">072</span>    encodingCtx = 
dataBlockEncoder.newDataBlockEncodingContext(encoding,<a name="line.72"></a>
-<span class="sourceLineNo">073</span>        
HConstants.HFILEBLOCK_DUMMY_HEADER, meta);<a name="line.73"></a>
-<span class="sourceLineNo">074</span>    this.rawKVs = rawKVs;<a 
name="line.74"></a>
-<span class="sourceLineNo">075</span>    this.meta = meta;<a 
name="line.75"></a>
-<span class="sourceLineNo">076</span>  }<a name="line.76"></a>
-<span class="sourceLineNo">077</span><a name="line.77"></a>
-<span class="sourceLineNo">078</span>  /**<a name="line.78"></a>
-<span class="sourceLineNo">079</span>   * Provides access to compressed 
value.<a name="line.79"></a>
-<span class="sourceLineNo">080</span>   * @param headerSize header size of the 
block.<a name="line.80"></a>
-<span class="sourceLineNo">081</span>   * @return Forwards sequential 
iterator.<a name="line.81"></a>
-<span class="sourceLineNo">082</span>   */<a name="line.82"></a>
-<span class="sourceLineNo">083</span>  public Iterator&lt;Cell&gt; 
getIterator(int headerSize) {<a name="line.83"></a>
-<span class="sourceLineNo">084</span>    final int rawSize = rawKVs.length;<a 
name="line.84"></a>
-<span class="sourceLineNo">085</span>    byte[] encodedDataWithHeader = 
getEncodedData();<a name="line.85"></a>
-<span class="sourceLineNo">086</span>    int bytesToSkip = headerSize + 
Bytes.SIZEOF_SHORT;<a name="line.86"></a>
-<span class="sourceLineNo">087</span>    ByteArrayInputStream bais = new 
ByteArrayInputStream(encodedDataWithHeader,<a name="line.87"></a>
-<span class="sourceLineNo">088</span>        bytesToSkip, 
encodedDataWithHeader.length - bytesToSkip);<a name="line.88"></a>
-<span class="sourceLineNo">089</span>    final DataInputStream dis = new 
DataInputStream(bais);<a name="line.89"></a>
+<span class="sourceLineNo">057</span>  private byte[] cachedEncodedData;<a 
name="line.57"></a>
+<span class="sourceLineNo">058</span><a name="line.58"></a>
+<span class="sourceLineNo">059</span>  private final HFileBlockEncodingContext 
encodingCtx;<a name="line.59"></a>
+<span class="sourceLineNo">060</span>  private HFileContext meta;<a 
name="line.60"></a>
+<span class="sourceLineNo">061</span><a name="line.61"></a>
+<span class="sourceLineNo">062</span>  private final DataBlockEncoding 
encoding;<a name="line.62"></a>
+<span class="sourceLineNo">063</span><a name="line.63"></a>
+<span class="sourceLineNo">064</span>  // The is for one situation that there 
are some cells includes tags and others are not.<a name="line.64"></a>
+<span class="sourceLineNo">065</span>  // isTagsLenZero stores if cell tags 
length is zero before doing encoding since we need<a name="line.65"></a>
+<span class="sourceLineNo">066</span>  // to check cell tags length is zero or 
not after decoding.<a name="line.66"></a>
+<span class="sourceLineNo">067</span>  // Encoders ROW_INDEX_V1 would abandon 
tags segment if tags is 0 after decode cells to<a name="line.67"></a>
+<span class="sourceLineNo">068</span>  // byte array, other encoders won't do 
that. So we have to find a way to add tagsLen zero<a name="line.68"></a>
+<span class="sourceLineNo">069</span>  // in the decoded byte array.<a 
name="line.69"></a>
+<span class="sourceLineNo">070</span>  private List&lt;Boolean&gt; 
isTagsLenZero = new ArrayList&lt;&gt;();<a name="line.70"></a>
+<span class="sourceLineNo">071</span><a name="line.71"></a>
+<span class="sourceLineNo">072</span>  /**<a name="line.72"></a>
+<span class="sourceLineNo">073</span>   * Create a buffer which will be 
encoded using dataBlockEncoder.<a name="line.73"></a>
+<span class="sourceLineNo">074</span>   * @param dataBlockEncoder Algorithm 
used for compression.<a name="line.74"></a>
+<span class="sourceLineNo">075</span>   * @param encoding encoding type used<a 
name="line.75"></a>
+<span class="sourceLineNo">076</span>   * @param rawKVs<a name="line.76"></a>
+<span class="sourceLineNo">077</span>   * @param meta<a name="line.77"></a>
+<span class="sourceLineNo">078</span>   */<a name="line.78"></a>
+<span class="sourceLineNo">079</span>  public 
EncodedDataBlock(DataBlockEncoder dataBlockEncoder, DataBlockEncoding 
encoding,<a name="line.79"></a>
+<span class="sourceLineNo">080</span>      byte[] rawKVs, HFileContext meta) 
{<a name="line.80"></a>
+<span class="sourceLineNo">081</span>    
Preconditions.checkNotNull(encoding,<a name="line.81"></a>
+<span class="sourceLineNo">082</span>        "Cannot create encoded data block 
with null encoder");<a name="line.82"></a>
+<span class="sourceLineNo">083</span>    this.dataBlockEncoder = 
dataBlockEncoder;<a name="line.83"></a>
+<span class="sourceLineNo">084</span>    this.encoding = encoding;<a 
name="line.84"></a>
+<span class="sourceLineNo">085</span>    encodingCtx = 
dataBlockEncoder.newDataBlockEncodingContext(encoding,<a name="line.85"></a>
+<span class="sourceLineNo">086</span>        
HConstants.HFILEBLOCK_DUMMY_HEADER, meta);<a name="line.86"></a>
+<span class="sourceLineNo">087</span>    this.rawKVs = rawKVs;<a 
name="line.87"></a>
+<span class="sourceLineNo">088</span>    this.meta = meta;<a 
name="line.88"></a>
+<span class="sourceLineNo">089</span>  }<a name="line.89"></a>
 <span class="sourceLineNo">090</span><a name="line.90"></a>
-<span class="sourceLineNo">091</span>    return new Iterator&lt;Cell&gt;() {<a 
name="line.91"></a>
-<span class="sourceLineNo">092</span>      private ByteBuffer decompressedData 
= null;<a name="line.92"></a>
-<span class="sourceLineNo">093</span><a name="line.93"></a>
-<span class="sourceLineNo">094</span>      @Override<a name="line.94"></a>
-<span class="sourceLineNo">095</span>      public boolean hasNext() {<a 
name="line.95"></a>
-<span class="sourceLineNo">096</span>        if (decompressedData == null) {<a 
name="line.96"></a>
-<span class="sourceLineNo">097</span>          return rawSize &gt; 0;<a 
name="line.97"></a>
-<span class="sourceLineNo">098</span>        }<a name="line.98"></a>
-<span class="sourceLineNo">099</span>        return 
decompressedData.hasRemaining();<a name="line.99"></a>
-<span class="sourceLineNo">100</span>      }<a name="line.100"></a>
-<span class="sourceLineNo">101</span><a name="line.101"></a>
-<span class="sourceLineNo">102</span>      @Override<a name="line.102"></a>
-<span class="sourceLineNo">103</span>      public Cell next() {<a 
name="line.103"></a>
-<span class="sourceLineNo">104</span>        if (decompressedData == null) {<a 
name="line.104"></a>
-<span class="sourceLineNo">105</span>          try {<a name="line.105"></a>
-<span class="sourceLineNo">106</span>            decompressedData = 
dataBlockEncoder.decodeKeyValues(dis, dataBlockEncoder<a name="line.106"></a>
-<span class="sourceLineNo">107</span>                
.newDataBlockDecodingContext(meta));<a name="line.107"></a>
-<span class="sourceLineNo">108</span>          } catch (IOException e) {<a 
name="line.108"></a>
-<span class="sourceLineNo">109</span>            throw new 
RuntimeException("Problem with data block encoder, " +<a name="line.109"></a>
-<span class="sourceLineNo">110</span>                "most likely it requested 
more bytes than are available.", e);<a name="line.110"></a>
-<span class="sourceLineNo">111</span>          }<a name="line.111"></a>
-<span class="sourceLineNo">112</span>          decompressedData.rewind();<a 
name="line.112"></a>
-<span class="sourceLineNo">113</span>        }<a name="line.113"></a>
-<span class="sourceLineNo">114</span>        int offset = 
decompressedData.position();<a name="line.114"></a>
-<span class="sourceLineNo">115</span>        int klen = 
decompressedData.getInt();<a name="line.115"></a>
-<span class="sourceLineNo">116</span>        int vlen = 
decompressedData.getInt();<a name="line.116"></a>
-<span class="sourceLineNo">117</span>        int tagsLen = 0;<a 
name="line.117"></a>
-<span class="sourceLineNo">118</span>        
ByteBufferUtils.skip(decompressedData, klen + vlen);<a name="line.118"></a>
-<span class="sourceLineNo">119</span>        // Read the tag length in case 
when steam contain tags<a name="line.119"></a>
-<span class="sourceLineNo">120</span>        if (meta.isIncludesTags()) {<a 
name="line.120"></a>
-<span class="sourceLineNo">121</span>          tagsLen = 
((decompressedData.get() &amp; 0xff) &lt;&lt; 8) ^ (decompressedData.get() 
&amp; 0xff);<a name="line.121"></a>
-<span class="sourceLineNo">122</span>          
ByteBufferUtils.skip(decompressedData, tagsLen);<a name="line.122"></a>
-<span class="sourceLineNo">123</span>        }<a name="line.123"></a>
-<span class="sourceLineNo">124</span>        KeyValue kv = new 
KeyValue(decompressedData.array(), offset,<a name="line.124"></a>
-<span class="sourceLineNo">125</span>            (int) 
KeyValue.getKeyValueDataStructureSize(klen, vlen, tagsLen));<a 
name="line.125"></a>
-<span class="sourceLineNo">126</span>        if (meta.isIncludesMvcc()) {<a 
name="line.126"></a>
-<span class="sourceLineNo">127</span>          long mvccVersion = 
ByteBufferUtils.readVLong(decompressedData);<a name="line.127"></a>
-<span class="sourceLineNo">128</span>          
kv.setSequenceId(mvccVersion);<a name="line.128"></a>
-<span class="sourceLineNo">129</span>        }<a name="line.129"></a>
-<span class="sourceLineNo">130</span>        return kv;<a name="line.130"></a>
-<span class="sourceLineNo">131</span>      }<a name="line.131"></a>
-<span class="sourceLineNo">132</span><a name="line.132"></a>
-<span class="sourceLineNo">133</span>      @Override<a name="line.133"></a>
-<span class="sourceLineNo">134</span>      public void remove() {<a 
name="line.134"></a>
-<span class="sourceLineNo">135</span>        throw new 
NotImplementedException("remove() is not supported!");<a name="line.135"></a>
-<span class="sourceLineNo">136</span>      }<a name="line.136"></a>
-<span class="sourceLineNo">137</span><a name="line.137"></a>
-<span class="sourceLineNo">138</span>      @Override<a name="line.138"></a>
-<span class="sourceLineNo">139</span>      public String toString() {<a 
name="line.139"></a>
-<span class="sourceLineNo">140</span>        return "Iterator of: " + 
dataBlockEncoder.getClass().getName();<a name="line.140"></a>
-<span class="sourceLineNo">141</span>      }<a name="line.141"></a>
-<span class="sourceLineNo">142</span><a name="line.142"></a>
-<span class="sourceLineNo">143</span>    };<a name="line.143"></a>
-<span class="sourceLineNo">144</span>  }<a name="line.144"></a>
-<span class="sourceLineNo">145</span><a name="line.145"></a>
-<span class="sourceLineNo">146</span>  /**<a name="line.146"></a>
-<span class="sourceLineNo">147</span>   * Find the size of minimal buffer that 
could store compressed data.<a name="line.147"></a>
-<span class="sourceLineNo">148</span>   * @return Size in bytes of compressed 
data.<a name="line.148"></a>
-<span class="sourceLineNo">149</span>   */<a name="line.149"></a>
-<span class="sourceLineNo">150</span>  public int getSize() {<a 
name="line.150"></a>
-<span class="sourceLineNo">151</span>    return getEncodedData().length;<a 
name="line.151"></a>
-<span class="sourceLineNo">152</span>  }<a name="line.152"></a>
-<span class="sourceLineNo">153</span><a name="line.153"></a>
-<span class="sourceLineNo">154</span>  /**<a name="line.154"></a>
-<span class="sourceLineNo">155</span>   * Find the size of compressed data 
assuming that buffer will be compressed<a name="line.155"></a>
-<span class="sourceLineNo">156</span>   * using given algorithm.<a 
name="line.156"></a>
-<span class="sourceLineNo">157</span>   * @param algo compression algorithm<a 
name="line.157"></a>
-<span class="sourceLineNo">158</span>   * @param compressor compressor already 
requested from codec<a name="line.158"></a>
-<span class="sourceLineNo">159</span>   * @param inputBuffer Array to be 
compressed.<a name="line.159"></a>
-<span class="sourceLineNo">160</span>   * @param offset Offset to beginning of 
the data.<a name="line.160"></a>
-<span class="sourceLineNo">161</span>   * @param length Length to be 
compressed.<a name="line.161"></a>
-<span class="sourceLineNo">162</span>   * @return Size of compressed data in 
bytes.<a name="line.162"></a>
-<span class="sourceLineNo">163</span>   * @throws IOException<a 
name="line.163"></a>
-<span class="sourceLineNo">164</span>   */<a name="line.164"></a>
-<span class="sourceLineNo">165</span>  
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NP_NULL_ON_SOME_PATH_EXCEPTION",<a
 name="line.165"></a>
-<span class="sourceLineNo">166</span>       justification="No sure what 
findbugs wants but looks to me like no NPE")<a name="line.166"></a>
-<span class="sourceLineNo">167</span>  public static int 
getCompressedSize(Algorithm algo, Compressor compressor,<a name="line.167"></a>
-<span class="sourceLineNo">168</span>      byte[] inputBuffer, int offset, int 
length) throws IOException {<a name="line.168"></a>
-<span class="sourceLineNo">169</span><a name="line.169"></a>
-<span class="sourceLineNo">170</span>    // Create streams<a 
name="line.170"></a>
-<span class="sourceLineNo">171</span>    // Storing them so we can close 
them<a name="line.171"></a>
-<span class="sourceLineNo">172</span>    final IOUtils.NullOutputStream 
nullOutputStream = new IOUtils.NullOutputStream();<a name="line.172"></a>
-<span class="sourceLineNo">173</span>    final DataOutputStream 
compressedStream = new DataOutputStream(nullOutputStream);<a 
name="line.173"></a>
-<span class="sourceLineNo">174</span>    OutputStream compressingStream = 
null;<a name="line.174"></a>
+<span class="sourceLineNo">091</span>  /**<a name="line.91"></a>
+<span class="sourceLineNo">092</span>   * Provides access to compressed 
value.<a name="line.92"></a>
+<span class="sourceLineNo">093</span>   * @param headerSize header size of the 
block.<a name="line.93"></a>
+<span class="sourceLineNo">094</span>   * @return Forwards sequential 
iterator.<a name="line.94"></a>
+<span class="sourceLineNo">095</span>   */<a name="line.95"></a>
+<span class="sourceLineNo">096</span>  public Iterator&lt;Cell&gt; 
getIterator(int headerSize) {<a name="line.96"></a>
+<span class="sourceLineNo">097</span>    final int rawSize = rawKVs.length;<a 
name="line.97"></a>
+<span class="sourceLineNo">098</span>    byte[] encodedDataWithHeader = 
getEncodedData();<a name="line.98"></a>
+<span class="sourceLineNo">099</span>    int bytesToSkip = headerSize + 
Bytes.SIZEOF_SHORT;<a name="line.99"></a>
+<span class="sourceLineNo">100</span>    ByteArrayInputStream bais = new 
ByteArrayInputStream(encodedDataWithHeader,<a name="line.100"></a>
+<span class="sourceLineNo">101</span>        bytesToSkip, 
encodedDataWithHeader.length - bytesToSkip);<a name="line.101"></a>
+<span class="sourceLineNo">102</span>    final DataInputStream dis = new 
DataInputStream(bais);<a name="line.102"></a>
+<span class="sourceLineNo">103</span><a name="line.103"></a>
+<span class="sourceLineNo">104</span>    return new Iterator&lt;Cell&gt;() {<a 
name="line.104"></a>
+<span class="sourceLineNo">105</span>      private ByteBuffer decompressedData 
= null;<a name="line.105"></a>
+<span class="sourceLineNo">106</span>      private Iterator&lt;Boolean&gt; it 
= isTagsLenZero.iterator();<a name="line.106"></a>
+<span class="sourceLineNo">107</span><a name="line.107"></a>
+<span class="sourceLineNo">108</span>      @Override<a name="line.108"></a>
+<span class="sourceLineNo">109</span>      public boolean hasNext() {<a 
name="line.109"></a>
+<span class="sourceLineNo">110</span>        if (decompressedData == null) {<a 
name="line.110"></a>
+<span class="sourceLineNo">111</span>          return rawSize &gt; 0;<a 
name="line.111"></a>
+<span class="sourceLineNo">112</span>        }<a name="line.112"></a>
+<span class="sourceLineNo">113</span>        return 
decompressedData.hasRemaining();<a name="line.113"></a>
+<span class="sourceLineNo">114</span>      }<a name="line.114"></a>
+<span class="sourceLineNo">115</span><a name="line.115"></a>
+<span class="sourceLineNo">116</span>      @Override<a name="line.116"></a>
+<span class="sourceLineNo">117</span>      public Cell next() {<a 
name="line.117"></a>
+<span class="sourceLineNo">118</span>        if (decompressedData == null) {<a 
name="line.118"></a>
+<span class="sourceLineNo">119</span>          try {<a name="line.119"></a>
+<span class="sourceLineNo">120</span>            decompressedData = 
dataBlockEncoder.decodeKeyValues(dis, dataBlockEncoder<a name="line.120"></a>
+<span class="sourceLineNo">121</span>                
.newDataBlockDecodingContext(meta));<a name="line.121"></a>
+<span class="sourceLineNo">122</span>          } catch (IOException e) {<a 
name="line.122"></a>
+<span class="sourceLineNo">123</span>            throw new 
RuntimeException("Problem with data block encoder, " +<a name="line.123"></a>
+<span class="sourceLineNo">124</span>                "most likely it requested 
more bytes than are available.", e);<a name="line.124"></a>
+<span class="sourceLineNo">125</span>          }<a name="line.125"></a>
+<span class="sourceLineNo">126</span>          decompressedData.rewind();<a 
name="line.126"></a>
+<span class="sourceLineNo">127</span>        }<a name="line.127"></a>
+<span class="sourceLineNo">128</span>        int offset = 
decompressedData.position();<a name="line.128"></a>
+<span class="sourceLineNo">129</span>        int klen = 
decompressedData.getInt();<a name="line.129"></a>
+<span class="sourceLineNo">130</span>        int vlen = 
decompressedData.getInt();<a name="line.130"></a>
+<span class="sourceLineNo">131</span>        int tagsLen = 0;<a 
name="line.131"></a>
+<span class="sourceLineNo">132</span>        
ByteBufferUtils.skip(decompressedData, klen + vlen);<a name="line.132"></a>
+<span class="sourceLineNo">133</span>        // Read the tag length in case 
when stream contain tags<a name="line.133"></a>
+<span class="sourceLineNo">134</span>        if (meta.isIncludesTags()) {<a 
name="line.134"></a>
+<span class="sourceLineNo">135</span>          boolean noTags = true;<a 
name="line.135"></a>
+<span class="sourceLineNo">136</span>          if (it.hasNext()) {<a 
name="line.136"></a>
+<span class="sourceLineNo">137</span>            noTags = it.next();<a 
name="line.137"></a>
+<span class="sourceLineNo">138</span>          }<a name="line.138"></a>
+<span class="sourceLineNo">139</span>          // ROW_INDEX_V1 will not put 
tagsLen back in cell if it is zero, there is no need<a name="line.139"></a>
+<span class="sourceLineNo">140</span>          // to read short here.<a 
name="line.140"></a>
+<span class="sourceLineNo">141</span>          if 
(!(encoding.equals(DataBlockEncoding.ROW_INDEX_V1) &amp;&amp; noTags)) {<a 
name="line.141"></a>
+<span class="sourceLineNo">142</span>            tagsLen = 
((decompressedData.get() &amp; 0xff) &lt;&lt; 8) ^ (decompressedData.get() 
&amp; 0xff);<a name="line.142"></a>
+<span class="sourceLineNo">143</span>            
ByteBufferUtils.skip(decompressedData, tagsLen);<a name="line.143"></a>
+<span class="sourceLineNo">144</span>          }<a name="line.144"></a>
+<span class="sourceLineNo">145</span>        }<a name="line.145"></a>
+<span class="sourceLineNo">146</span>        KeyValue kv = new 
KeyValue(decompressedData.array(), offset,<a name="line.146"></a>
+<span class="sourceLineNo">147</span>            (int) 
KeyValue.getKeyValueDataStructureSize(klen, vlen, tagsLen));<a 
name="line.147"></a>
+<span class="sourceLineNo">148</span>        if (meta.isIncludesMvcc()) {<a 
name="line.148"></a>
+<span class="sourceLineNo">149</span>          long mvccVersion = 
ByteBufferUtils.readVLong(decompressedData);<a name="line.149"></a>
+<span class="sourceLineNo">150</span>          
kv.setSequenceId(mvccVersion);<a name="line.150"></a>
+<span class="sourceLineNo">151</span>        }<a name="line.151"></a>
+<span class="sourceLineNo">152</span>        return kv;<a name="line.152"></a>
+<span class="sourceLineNo">153</span>      }<a name="line.153"></a>
+<span class="sourceLineNo">154</span><a name="line.154"></a>
+<span class="sourceLineNo">155</span>      @Override<a name="line.155"></a>
+<span class="sourceLineNo">156</span>      public void remove() {<a 
name="line.156"></a>
+<span class="sourceLineNo">157</span>        throw new 
NotImplementedException("remove() is not supported!");<a name="line.157"></a>
+<span class="sourceLineNo">158</span>      }<a name="line.158"></a>
+<span class="sourceLineNo">159</span><a name="line.159"></a>
+<span class="sourceLineNo">160</span>      @Override<a name="line.160"></a>
+<span class="sourceLineNo">161</span>      public String toString() {<a 
name="line.161"></a>
+<span class="sourceLineNo">162</span>        return "Iterator of: " + 
dataBlockEncoder.getClass().getName();<a name="line.162"></a>
+<span class="sourceLineNo">163</span>      }<a name="line.163"></a>
+<span class="sourceLineNo">164</span><a name="line.164"></a>
+<span class="sourceLineNo">165</span>    };<a name="line.165"></a>
+<span class="sourceLineNo">166</span>  }<a name="line.166"></a>
+<span class="sourceLineNo">167</span><a name="line.167"></a>
+<span class="sourceLineNo">168</span>  /**<a name="line.168"></a>
+<span class="sourceLineNo">169</span>   * Find the size of minimal buffer that 
could store compressed data.<a name="line.169"></a>
+<span class="sourceLineNo">170</span>   * @return Size in bytes of compressed 
data.<a name="line.170"></a>
+<span class="sourceLineNo">171</span>   */<a name="line.171"></a>
+<span class="sourceLineNo">172</span>  public int getSize() {<a 
name="line.172"></a>
+<span class="sourceLineNo">173</span>    return getEncodedData().length;<a 
name="line.173"></a>
+<span class="sourceLineNo">174</span>  }<a name="line.174"></a>
 <span class="sourceLineNo">175</span><a name="line.175"></a>
-<span class="sourceLineNo">176</span><a name="line.176"></a>
-<span class="sourceLineNo">177</span>    try {<a name="line.177"></a>
-<span class="sourceLineNo">178</span>      if (compressor != null) {<a 
name="line.178"></a>
-<span class="sourceLineNo">179</span>        compressor.reset();<a 
name="line.179"></a>
-<span class="sourceLineNo">180</span>      }<a name="line.180"></a>
-<span class="sourceLineNo">181</span><a name="line.181"></a>
-<span class="sourceLineNo">182</span>      compressingStream = 
algo.createCompressionStream(compressedStream, compressor, 0);<a 
name="line.182"></a>
-<span class="sourceLineNo">183</span><a name="line.183"></a>
-<span class="sourceLineNo">184</span>      
compressingStream.write(inputBuffer, offset, length);<a name="line.184"></a>
-<span class="sourceLineNo">185</span>      compressingStream.flush();<a 
name="line.185"></a>
-<span class="sourceLineNo">186</span><a name="line.186"></a>
-<span class="sourceLineNo">187</span>      return compressedStream.size();<a 
name="line.187"></a>
-<span class="sourceLineNo">188</span>    } finally {<a name="line.188"></a>
-<span class="sourceLineNo">189</span>      nullOutputStream.close();<a 
name="line.189"></a>
-<span class="sourceLineNo">190</span>      compressedStream.close();<a 
name="line.190"></a>
-<span class="sourceLineNo">191</span>      compressingStream.close();<a 
name="line.191"></a>
-<span class="sourceLineNo">192</span>    }<a name="line.192"></a>
-<span class="sourceLineNo">193</span>  }<a name="line.193"></a>
-<span class="sourceLineNo">194</span><a name="line.194"></a>
-<span class="sourceLineNo">195</span>  /**<a name="line.195"></a>
-<span class="sourceLineNo">196</span>   * Estimate size after second stage of 
compression (e.g. LZO).<a name="line.196"></a>
-<span class="sourceLineNo">197</span>   * @param comprAlgo compression 
algorithm to be used for compression<a name="line.197"></a>
-<span class="sourceLineNo">198</span>   * @param compressor compressor 
corresponding to the given compression<a name="line.198"></a>
-<span class="sourceLineNo">199</span>   *          algorithm<a 
name="line.199"></a>
-<span class="sourceLineNo">200</span>   * @return Size after second stage of 
compression.<a name="line.200"></a>
-<span class="sourceLineNo">201</span>   */<a name="line.201"></a>
-<span class="sourceLineNo">202</span>  public int 
getEncodedCompressedSize(Algorithm comprAlgo,<a name="line.202"></a>
-<span class="sourceLineNo">203</span>      Compressor compressor) throws 
IOException {<a name="line.203"></a>
-<span class="sourceLineNo">204</span>    byte[] compressedBytes = 
getEncodedData();<a name="line.204"></a>
-<span class="sourceLineNo">205</span>    return getCompressedSize(comprAlgo, 
compressor, compressedBytes, 0,<a name="line.205"></a>
-<span class="sourceLineNo">206</span>        compressedBytes.length);<a 
name="line.206"></a>
-<span class="sourceLineNo">207</span>  }<a name="line.207"></a>
+<span class="sourceLineNo">176</span>  /**<a name="line.176"></a>
+<span class="sourceLineNo">177</span>   * Find the size of compressed data 
assuming that buffer will be compressed<a name="line.177"></a>
+<span class="sourceLineNo">178</span>   * using given algorithm.<a 
name="line.178"></a>
+<span class="sourceLineNo">179</span>   * @param algo compression algorithm<a 
name="line.179"></a>
+<span class="sourceLineNo">180</span>   * @param compressor compressor already 
requested from codec<a name="line.180"></a>
+<span class="sourceLineNo">181</span>   * @param inputBuffer Array to be 
compressed.<a name="line.181"></a>
+<span class="sourceLineNo">182</span>   * @param offset Offset to beginning of 
the data.<a name="line.182"></a>
+<span class="sourceLineNo">183</span>   * @param length Length to be 
compressed.<a name="line.183"></a>
+<span class="sourceLineNo">184</span>   * @return Size of compressed data in 
bytes.<a name="line.184"></a>
+<span class="sourceLineNo">185</span>   * @throws IOException<a 
name="line.185"></a>
+<span class="sourceLineNo">186</span>   */<a name="line.186"></a>
+<span class="sourceLineNo">187</span>  
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NP_NULL_ON_SOME_PATH_EXCEPTION",<a
 name="line.187"></a>
+<span class="sourceLineNo">188</span>       justification="No sure what 
findbugs wants but looks to me like no NPE")<a name="line.188"></a>
+<span class="sourceLineNo">189</span>  public static int 
getCompressedSize(Algorithm algo, Compressor compressor,<a name="line.189"></a>
+<span class="sourceLineNo">190</span>      byte[] inputBuffer, int offset, int 
length) throws IOException {<a name="line.190"></a>
+<span class="sourceLineNo">191</span><a name="line.191"></a>
+<span class="sourceLineNo">192</span>    // Create streams<a 
name="line.192"></a>
+<span class="sourceLineNo">193</span>    // Storing them so we can close 
them<a name="line.193"></a>
+<span class="sourceLineNo">194</span>    final IOUtils.NullOutputStream 
nullOutputStream = new IOUtils.NullOutputStream();<a name="line.194"></a>
+<span class="sourceLineNo">195</span>    final DataOutputStream 
compressedStream = new DataOutputStream(nullOutputStream);<a 
name="line.195"></a>
+<span class="sourceLineNo">196</span>    OutputStream compressingStream = 
null;<a name="line.196"></a>
+<span class="sourceLineNo">197</span><a name="line.197"></a>
+<span class="sourceLineNo">198</span><a name="line.198"></a>
+<span class="sourceLineNo">199</span>    try {<a name="line.199"></a>
+<span class="sourceLineNo">200</span>      if (compressor != null) {<a 
name="line.200"></a>
+<span class="sourceLineNo">201</span>        compressor.reset();<a 
name="line.201"></a>
+<span class="sourceLineNo">202</span>      }<a name="line.202"></a>
+<span class="sourceLineNo">203</span><a name="line.203"></a>
+<span class="sourceLineNo">204</span>      compressingStream = 
algo.createCompressionStream(compressedStream, compressor, 0);<a 
name="line.204"></a>
+<span class="sourceLineNo">205</span><a name="line.205"></a>
+<span class="sourceLineNo">206</span>      
compressingStream.write(inputBuffer, offset, length);<a name="line.206"></a>
+<span class="sourceLineNo">207</span>      compressingStream.flush();<a 
name="line.207"></a>
 <span class="sourceLineNo">208</span><a name="line.208"></a>
-<span class="sourceLineNo">209</span>  /** @return encoded data with header 
*/<a name="line.209"></a>
-<span class="sourceLineNo">210</span>  private byte[] getEncodedData() {<a 
name="line.210"></a>
-<span class="sourceLineNo">211</span>    if (cachedEncodedData != null) {<a 
name="line.211"></a>
-<span class="sourceLineNo">212</span>      return cachedEncodedData;<a 
name="line.212"></a>
-<span class="sourceLineNo">213</span>    }<a name="line.213"></a>
-<span class="sourceLineNo">214</span>    cachedEncodedData = encodeData();<a 
name="line.214"></a>
-<span class="sourceLineNo">215</span>    return cachedEncodedData;<a 
name="line.215"></a>
-<span class="sourceLineNo">216</span>  }<a name="line.216"></a>
-<span class="sourceLineNo">217</span><a name="line.217"></a>
-<span class="sourceLineNo">218</span>  private ByteBuffer 
getUncompressedBuffer() {<a name="line.218"></a>
-<span class="sourceLineNo">219</span>    if (rawBuffer == null || 
rawBuffer.limit() &lt; rawKVs.length) {<a name="line.219"></a>
-<span class="sourceLineNo">220</span>      rawBuffer = 
ByteBuffer.wrap(rawKVs);<a name="line.220"></a>
-<span class="sourceLineNo">221</span>    }<a name="line.221"></a>
-<span class="sourceLineNo">222</span>    return rawBuffer;<a 
name="line.222"></a>
-<span class="sourceLineNo">223</span>  }<a name="line.223"></a>
-<span class="sourceLineNo">224</span><a name="line.224"></a>
-<span class="sourceLineNo">225</span>  /**<a name="line.225"></a>
-<span class="sourceLineNo">226</span>   * Do the encoding, but do not cache 
the encoded data.<a name="line.226"></a>
-<span class="sourceLineNo">227</span>   * @return encoded data block with 
header and checksum<a name="line.227"></a>
-<span class="sourceLineNo">228</span>   */<a name="line.228"></a>
-<span class="sourceLineNo">229</span>  public byte[] encodeData() {<a 
name="line.229"></a>
-<span class="sourceLineNo">230</span>    ByteArrayOutputStream baos = new 
ByteArrayOutputStream();<a name="line.230"></a>
-<span class="sourceLineNo">231</span>    byte [] baosBytes = null;<a 
name="line.231"></a>
-<span class="sourceLineNo">232</span>    try {<a name="line.232"></a>
-<span class="sourceLineNo">233</span>      
baos.write(HConstants.HFILEBLOCK_DUMMY_HEADER);<a name="line.233"></a>
-<span class="sourceLineNo">234</span>      DataOutputStream out = new 
DataOutputStream(baos);<a name="line.234"></a>
-<span class="sourceLineNo">235</span>      
this.dataBlockEncoder.startBlockEncoding(encodingCtx, out);<a 
name="line.235"></a>
-<span class="sourceLineNo">236</span>      ByteBuffer in = 
getUncompressedBuffer();<a name="line.236"></a>
-<span class="sourceLineNo">237</span>      in.rewind();<a name="line.237"></a>
-<span class="sourceLineNo">238</span>      int klength, vlength;<a 
name="line.238"></a>
-<span class="sourceLineNo">239</span>      int tagsLength = 0;<a 
name="line.239"></a>
-<span class="sourceLineNo">240</span>      long memstoreTS = 0L;<a 
name="line.240"></a>
-<span class="sourceLineNo">241</span>      KeyValue kv = null;<a 
name="line.241"></a>
-<span class="sourceLineNo">242</span>      while (in.hasRemaining()) {<a 
name="line.242"></a>
-<span class="sourceLineNo">243</span>        int kvOffset = in.position();<a 
name="line.243"></a>
-<span class="sourceLineNo">244</span>        klength = in.getInt();<a 
name="line.244"></a>
-<span class="sourceLineNo">245</span>        vlength = in.getInt();<a 
name="line.245"></a>
-<span class="sourceLineNo">246</span>        ByteBufferUtils.skip(in, klength 
+ vlength);<a name="line.246"></a>
-<span class="sourceLineNo">247</span>        if (this.meta.isIncludesTags()) 
{<a name="line.247"></a>
-<span class="sourceLineNo">248</span>          tagsLength = ((in.get() &amp; 
0xff) &lt;&lt; 8) ^ (in.get() &amp; 0xff);<a name="line.248"></a>
-<span class="sourceLineNo">249</span>          ByteBufferUtils.skip(in, 
tagsLength);<a name="line.249"></a>
-<span class="sourceLineNo">250</span>        }<a name="line.250"></a>
-<span class="sourceLineNo">251</span>        if (this.meta.isIncludesMvcc()) 
{<a name="line.251"></a>
-<span class="sourceLineNo">252</span>          memstoreTS = 
ByteBufferUtils.readVLong(in);<a name="line.252"></a>
-<span class="sourceLineNo">253</span>        }<a name="line.253"></a>
-<span class="sourceLineNo">254</span>        kv = new KeyValue(in.array(), 
kvOffset, (int) KeyValue.getKeyValueDataStructureSize(<a name="line.254"></a>
-<span class="sourceLineNo">255</span>            klength, vlength, 
tagsLength));<a name="line.255"></a>
-<span class="sourceLineNo">256</span>        kv.setSequenceId(memstoreTS);<a 
name="line.256"></a>
-<span class="sourceLineNo">257</span>        this.dataBlockEncoder.encode(kv, 
encodingCtx, out);<a name="line.257"></a>
-<span class="sourceLineNo">258</span>      }<a name="line.258"></a>
-<span class="sourceLineNo">259</span>      // Below depends on BAOS internal 
behavior. toByteArray makes a copy of bytes so far.<a name="line.259"></a>
-<span class="sourceLineNo">260</span>      baos.flush();<a name="line.260"></a>
-<span class="sourceLineNo">261</span>      baosBytes = baos.toByteArray();<a 
name="line.261"></a>
-<span class="sourceLineNo">262</span>      
this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, baosBytes);<a 
name="line.262"></a>
-<span class="sourceLineNo">263</span>    } catch (IOException e) {<a 
name="line.263"></a>
-<span class="sourceLineNo">264</span>      throw new 
RuntimeException(String.format(<a name="line.264"></a>
-<span class="sourceLineNo">265</span>          "Bug in encoding part of 
algorithm %s. " +<a name="line.265"></a>
-<span class="sourceLineNo">266</span>          "Probably it requested more 
bytes than are available.",<a name="line.266"></a>
-<span class="sourceLineNo">267</span>          toString()), e);<a 
name="line.267"></a>
-<span class="sourceLineNo">268</span>    }<a name="line.268"></a>
-<span class="sourceLineNo">269</span>    return baosBytes;<a 
name="line.269"></a>
-<span class="sourceLineNo">270</span>  }<a name="line.270"></a>
-<span class="sourceLineNo">271</span><a name="line.271"></a>
-<span class="sourceLineNo">272</span>  @Override<a name="line.272"></a>
-<span class="sourceLineNo">273</span>  public String toString() {<a 
name="line.273"></a>
-<span class="sourceLineNo">274</span>    return dataBlockEncoder.toString();<a 
name="line.274"></a>
-<span class="sourceLineNo">275</span>  }<a name="line.275"></a>
-<span class="sourceLineNo">276</span>}<a name="line.276"></a>
+<span class="sourceLineNo">209</span>      return compressedStream.size();<a 
name="line.209"></a>
+<span class="sourceLineNo">210</span>    } finally {<a name="line.210"></a>
+<span class="sourceLineNo">211</span>      nullOutputStream.close();<a 
name="line.211"></a>
+<span class="sourceLineNo">212</span>      compressedStream.close();<a 
name="line.212"></a>
+<span class="sourceLineNo">213</span>      compressingStream.close();<a 
name="line.213"></a>
+<span class="sourceLineNo">214</span>    }<a name="line.214"></a>
+<span class="sourceLineNo">215</span>  }<a name="line.215"></a>
+<span class="sourceLineNo">216</span><a name="line.216"></a>
+<span class="sourceLineNo">217</span>  /**<a name="line.217"></a>
+<span class="sourceLineNo">218</span>   * Estimate size after second stage of 
compression (e.g. LZO).<a name="line.218"></a>
+<span class="sourceLineNo">219</span>   * @param comprAlgo compression 
algorithm to be used for compression<a name="line.219"></a>
+<span class="sourceLineNo">220</span>   * @param compressor compressor 
corresponding to the given compression<a name="line.220"></a>
+<span class="sourceLineNo">221</span>   *          algorithm<a 
name="line.221"></a>
+<span class="sourceLineNo">222</span>   * @return Size after second stage of 
compression.<a name="line.222"></a>
+<span class="sourceLineNo">223</span>   */<a name="line.223"></a>
+<span class="sourceLineNo">224</span>  public int 
getEncodedCompressedSize(Algorithm comprAlgo,<a name="line.224"></a>
+<span class="sourceLineNo">225</span>      Compressor compressor) throws 
IOException {<a name="line.225"></a>
+<span class="sourceLineNo">226</span>    byte[] compressedBytes = 
getEncodedData();<a name="line.226"></a>
+<span class="sourceLineNo">227</span>    return getCompressedSize(comprAlgo, 
compressor, compressedBytes, 0,<a name="line.227"></a>
+<span class="sourceLineNo">228</span>        compressedBytes.length);<a 
name="line.228"></a>
+<span class="sourceLineNo">229</span>  }<a name="line.229"></a>
+<span class="sourceLineNo">230</span><a name="line.230"></a>
+<span class="sourceLineNo">231</span>  /** @return encoded data with header 
*/<a name="line.231"></a>
+<span class="sourceLineNo">232</span>  private byte[] getEncodedData() {<a 
name="line.232"></a>
+<span class="sourceLineNo">233</span>    if (cachedEncodedData != null) {<a 
name="line.233"></a>
+<span class="sourceLineNo">234</span>      return cachedEncodedData;<a 
name="line.234"></a>
+<span class="sourceLineNo">235</span>    }<a name="line.235"></a>
+<span class="sourceLineNo">236</span>    cachedEncodedData = encodeData();<a 
name="line.236"></a>
+<span class="sourceLineNo">237</span>    return cachedEncodedData;<a 
name="line.237"></a>
+<span class="sourceLineNo">238</span>  }<a name="line.238"></a>
+<span class="sourceLineNo">239</span><a name="line.239"></a>
+<span class="sourceLineNo">240</span>  private ByteBuffer 
getUncompressedBuffer() {<a name="line.240"></a>
+<span class="sourceLineNo">241</span>    if (rawBuffer == null || 
rawBuffer.limit() &lt; rawKVs.length) {<a name="line.241"></a>
+<span class="sourceLineNo">242</span>      rawBuffer = 
ByteBuffer.wrap(rawKVs);<a name="line.242"></a>
+<span class="sourceLineNo">243</span>    }<a name="line.243"></a>
+<span class="sourceLineNo">244</span>    return rawBuffer;<a 
name="line.244"></a>
+<span class="sourceLineNo">245</span>  }<a name="line.245"></a>
+<span class="sourceLineNo">246</span><a name="line.246"></a>
+<span class="sourceLineNo">247</span>  /**<a name="line.247"></a>
+<span class="sourceLineNo">248</span>   * Do the encoding, but do not cache 
the encoded data.<a name="line.248"></a>
+<span class="sourceLineNo">249</span>   * @return encoded data block with 
header and checksum<a name="line.249"></a>
+<span class="sourceLineNo">250</span>   */<a name="line.250"></a>
+<span class="sourceLineNo">251</span>  public byte[] encodeData() {<a 
name="line.251"></a>
+<span class="sourceLineNo">252</span>    ByteArrayOutputStream baos = new 
ByteArrayOutputStream();<a name="line.252"></a>
+<span class="sourceLineNo">253</span>    byte [] baosBytes = null;<a 
name="line.253"></a>
+<span class="sourceLineNo">254</span>    try {<a name="line.254"></a>
+<span class="sourceLineNo">255</span>      
baos.write(HConstants.HFILEBLOCK_DUMMY_HEADER);<a name="line.255"></a>
+<span class="sourceLineNo">256</span>      DataOutputStream out = new 
DataOutputStream(baos);<a name="line.256"></a>
+<span class="sourceLineNo">257</span>      
this.dataBlockEncoder.startBlockEncoding(encodingCtx, out);<a 
name="line.257"></a>
+<span class="sourceLineNo">258</span>      ByteBuffer in = 
getUncompressedBuffer();<a name="line.258"></a>
+<span class="sourceLineNo">259</span>      in.rewind();<a name="line.259"></a>
+<span class="sourceLineNo">260</span>      int klength, vlength;<a 
name="line.260"></a>
+<span class="sourceLineNo">261</span>      int tagsLength = 0;<a 
name="line.261"></a>
+<span class="sourceLineNo">262</span>      long memstoreTS = 0L;<a 
name="line.262"></a>
+<span class="sourceLineNo">263</span>      KeyValue kv = null;<a 
name="line.263"></a>
+<span class="sourceLineNo">264</span>      while (in.hasRemaining()) {<a 
name="line.264"></a>
+<span class="sourceLineNo">265</span>        int kvOffset = in.position();<a 
name="line.265"></a>
+<span class="sourceLineNo">266</span>        klength = in.getInt();<a 
name="line.266"></a>
+<span class="sourceLineNo">267</span>        vlength = in.getInt();<a 
name="line.267"></a>
+<span class="sourceLineNo">268</span>        ByteBufferUtils.skip(in, klength 
+ vlength);<a name="line.268"></a>
+<span class="sourceLineNo">269</span>        if (this.meta.isIncludesTags()) 
{<a name="line.269"></a>
+<span class="sourceLineNo">270</span>          tagsLength = ((in.get() &amp; 
0xff) &lt;&lt; 8) ^ (in.get() &amp; 0xff);<a name="line.270"></a>
+<span class="sourceLineNo">271</span>          ByteBufferUtils.skip(in, 
tagsLength);<a name="line.271"></a>
+<span class="sourceLineNo">272</span>          
this.isTagsLenZero.add(tagsLength == 0);<a name="line.272"></a>
+<span class="sourceLineNo">273</span>        }<a name="line.273"></a>
+<span class="sourceLineNo">274</span>        if (this.meta.isIncludesMvcc()) 
{<a name="line.274"></a>
+<span class="sourceLineNo">275</span>          memstoreTS = 
ByteBufferUtils.readVLong(in);<a name="line.275"></a>
+<span class="sourceLineNo">276</span>        }<a name="line.276"></a>
+<span class="sourceLineNo">277</span>        kv = new KeyValue(in.array(), 
kvOffset, (int) KeyValue.getKeyValueDataStructureSize(<a name="line.277"></a>
+<span class="sourceLineNo">278</span>            klength, vlength, 
tagsLength));<a name="line.278"></a>
+<span class="sourceLineNo">279</span>        kv.setSequenceId(memstoreTS);<a 
name="line.279"></a>
+<span class="sourceLineNo">280</span>        this.dataBlockEncoder.encode(kv, 
encodingCtx, out);<a name="line.280"></a>
+<span class="sourceLineNo">281</span>      }<a name="line.281"></a>
+<span class="sourceLineNo">282</span>      // Below depends on BAOS internal 
behavior. toByteArray makes a copy of bytes so far.<a name="line.282"></a>
+<span class="sourceLineNo">283</span>      baos.flush();<a name="line.283"></a>
+<span class="sourceLineNo">284</span>      baosBytes = baos.toByteArray();<a 
name="line.284"></a>
+<span class="sourceLineNo">285</span>      
this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, baosBytes);<a 
name="line.285"></a>
+<span class="sourceLineNo">286</span>      // In endBlockEncoding(encodingCtx, 
out, baosBytes), Encoder ROW_INDEX_V1 write integer in<a name="line.286"></a>
+<span class="sourceLineNo">287</span>      // out while the others write 
integer in baosBytes(byte array). We need to add<a name="line.287"></a>
+<span class="sourceLineNo">288</span>      // baos.toByteArray() after 
endBlockEncoding again to make sure the integer writes in<a name="line.288"></a>
+<span class="sourceLineNo">289</span>      // outputstream with Encoder 
ROW_INDEX_V1 dump to byte array (baosBytes).<a name="line.289"></a>
+<span class="sourceLineNo">290</span>      // The if branch is necessary 
because Encoders excepts ROW_INDEX_V1 write integer in<a name="line.290"></a>
+<span class="sourceLineNo">291</span>      // baosBytes directly, without if 
branch and do toByteArray() again, baosBytes won't<a name="line.291"></a>
+<span class="sourceLineNo">292</span>      // contains the integer wrotten in 
endBlockEncoding.<a name="line.292"></a>
+<span class="sourceLineNo">293</span>      if 
(this.encoding.equals(DataBlockEncoding.ROW_INDEX_V1)) {<a name="line.293"></a>
+<span class="sourceLineNo">294</span>        baosBytes = baos.toByteArray();<a 
name="line.294"></a>
+<span class="sourceLineNo">295</span>      }<a name="line.295"></a>
+<span class="sourceLineNo">296</span>    } catch (IOException e) {<a 
name="line.296"></a>
+<span class="sourceLineNo">297</span>      throw new 
RuntimeException(String.format(<a name="line.297"></a>
+<span class="sourceLineNo">298</span>          "Bug in encoding part of 
algorithm %s. " +<a name="line.298"></a>
+<span class="sourceLineNo">299</span>          "Probably it requested more 
bytes than are available.",<a name="line.299"></a>
+<span class="sourceLineNo">300</span>          toString()), e);<a 
name="line.300"></a>
+<span class="sourceLineNo">301</span>    }<a name="line.301"></a>
+<span class="sourceLineNo">302</span>    return baosBytes;<a 
name="line.302"></a>
+<span class="sourceLineNo">303</span>  }<a name="line.303"></a>
+<span class="sourceLineNo">304</span><a name="line.304"></a>
+<span class="sourceLineNo">305</span>  @Override<a name="line.305"></a>
+<span class="sourceLineNo">306</span>  public String toString() {<a 
name="line.306"></a>
+<span class="sourceLineNo">307</span>    return encoding.name();<a 
name="line.307"></a>
+<span class="sourceLineNo">308</span>  }<a name="line.308"></a>
+<span class="sourceLineNo">309</span>}<a name="line.309"></a>
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c5e961f/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
----------------------------------------------------------------------
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
index 21e240a..794e33b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
@@ -209,7 +209,7 @@
 <span class="sourceLineNo">201</span>   * Key set of offsets in BucketCache is 
limited so soft reference is the best choice here.<a name="line.201"></a>
 <span class="sourceLineNo">202</span>   */<a name="line.202"></a>
 <span class="sourceLineNo">203</span>  @VisibleForTesting<a 
name="line.203"></a>
-<span class="sourceLineNo">204</span>  final IdReadWriteLock offsetLock = new 
IdReadWriteLock(ReferenceType.SOFT);<a name="line.204"></a>
+<span class="sourceLineNo">204</span>  final IdReadWriteLock&lt;Long&gt; 
offsetLock = new IdReadWriteLock&lt;&gt;(ReferenceType.SOFT);<a 
name="line.204"></a>
 <span class="sourceLineNo">205</span><a name="line.205"></a>
 <span class="sourceLineNo">206</span>  private final 
NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.206"></a>
 <span class="sourceLineNo">207</span>      new 
ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a 
name="line.207"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c5e961f/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
----------------------------------------------------------------------
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
index 21e240a..794e33b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
@@ -209,7 +209,7 @@
 <span class="sourceLineNo">201</span>   * Key set of offsets in BucketCache is 
limited so soft reference is the best choice here.<a name="line.201"></a>
 <span class="sourceLineNo">202</span>   */<a name="line.202"></a>
 <span class="sourceLineNo">203</span>  @VisibleForTesting<a 
name="line.203"></a>
-<span class="sourceLineNo">204</span>  final IdReadWriteLock offsetLock = new 
IdReadWriteLock(ReferenceType.SOFT);<a name="line.204"></a>
+<span class="sourceLineNo">204</span>  final IdReadWriteLock&lt;Long&gt; 
offsetLock = new IdReadWriteLock&lt;&gt;(ReferenceType.SOFT);<a 
name="line.204"></a>
 <span class="sourceLineNo">205</span><a name="line.205"></a>
 <span class="sourceLineNo">206</span>  private final 
NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.206"></a>
 <span class="sourceLineNo">207</span>      new 
ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a 
name="line.207"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c5e961f/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
----------------------------------------------------------------------
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
index 21e240a..794e33b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
@@ -209,7 +209,7 @@
 <span class="sourceLineNo">201</span>   * Key set of offsets in BucketCache is 
limited so soft reference is the best choice here.<a name="line.201"></a>
 <span class="sourceLineNo">202</span>   */<a name="line.202"></a>
 <span class="sourceLineNo">203</span>  @VisibleForTesting<a 
name="line.203"></a>
-<span class="sourceLineNo">204</span>  final IdReadWriteLock offsetLock = new 
IdReadWriteLock(ReferenceType.SOFT);<a name="line.204"></a>
+<span class="sourceLineNo">204</span>  final IdReadWriteLock&lt;Long&gt; 
offsetLock = new IdReadWriteLock&lt;&gt;(ReferenceType.SOFT);<a 
name="line.204"></a>
 <span class="sourceLineNo">205</span><a name="line.205"></a>
 <span class="sourceLineNo">206</span>  private final 
NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.206"></a>
 <span class="sourceLineNo">207</span>      new 
ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a 
name="line.207"></a>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/5c5e961f/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html
----------------------------------------------------------------------
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html
index 21e240a..794e33b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.SharedMemoryBucketEntry.html
@@ -209,7 +209,7 @@
 <span class="sourceLineNo">201</span>   * Key set of offsets in BucketCache is 
limited so soft reference is the best choice here.<a name="line.201"></a>
 <span class="sourceLineNo">202</span>   */<a name="line.202"></a>
 <span class="sourceLineNo">203</span>  @VisibleForTesting<a 
name="line.203"></a>
-<span class="sourceLineNo">204</span>  final IdReadWriteLock offsetLock = new 
IdReadWriteLock(ReferenceType.SOFT);<a name="line.204"></a>
+<span class="sourceLineNo">204</span>  final IdReadWriteLock&lt;Long&gt; 
offsetLock = new IdReadWriteLock&lt;&gt;(ReferenceType.SOFT);<a 
name="line.204"></a>
 <span class="sourceLineNo">205</span><a name="line.205"></a>
 <span class="sourceLineNo">206</span>  private final 
NavigableSet&lt;BlockCacheKey&gt; blocksByHFile =<a name="line.206"></a>
 <span class="sourceLineNo">207</span>      new 
ConcurrentSkipListSet&lt;&gt;(new Comparator&lt;BlockCacheKey&gt;() {<a 
name="line.207"></a>
