svn commit: r23919 - /release/hbase/KEYS

2017-12-27 Thread stack
Author: stack
Date: Wed Dec 27 16:15:51 2017
New Revision: 23919

Log:
Add Mike Drob Key

Modified:
release/hbase/KEYS

Modified: release/hbase/KEYS
==
--- release/hbase/KEYS (original)
+++ release/hbase/KEYS Wed Dec 27 16:15:51 2017
@@ -978,3 +978,100 @@ IP9wzT5o/UOKSxqqYmsgSoSEJFwO5adAkLyW8Ci8
 bFBjjz8VFd1cF4F3MIrVi3UN/Knw9EIWcPqJeB2eDm6TIN3UmJSyFObAGQZmZW1N
 =yX4c
 -END PGP PUBLIC KEY BLOCK-
+ASF ID: mdrob
+LDAP PGP key: 86ED B9C3 3B85 1722 8E88  A8F9 3E48 C0C6 EF36 2B9E
+
+86ED B9C3 3B85 1722 8E88  A8F9 3E48 C0C6 EF36 2B9E
+-BEGIN PGP PUBLIC KEY BLOCK-
+Version: GnuPG v1
+
+mQINBFICr4IBEADc9j0fYpDAhSpQjhtPzxRq9fWQXsFCR6jRhijHmfE9YqoaK0y5
+ZJ0e7sziSi/B72MttbOwm4rvYZbVKhPW8W1K8nYqqjV7P+qn6se5tKiW6b0HzhJm
+jZD+ZAPpEt2qi2geoBX0LrJgtZjp1CyJ8Z0BtmGdmJz0epWv/NHtpdijzAMv4OsT
+vRxez/ULGW21twHon67sUYjeEhib3JR3WtRGzELYwbI0plCfAdotRoEwIVZsQBJk
+cUhS5LQa2iT5JD+FNeM1y2dbGYMKePYLTFqqC6fVto8q449tJosTZ3FcWrxeZwsF
+p+HfPLLxJvo5CXs4fzSzaZ77hia0+miBzBi6d4jK5aVqrEUh74jDTnsI1eU96sGt
+gehpcOvIhOlsbd98FDm75+evu6RtFFDI5dOquUhpMk14gwsXznoFZHLKR08d2TGb
+NRH260mtpv7qwSgTxgyVmdMG6eQImJIwt7ekl0p7AjCssYHsU2hWxGlO/0eYYf85
+sH9vNWAO/h0yqWSNzlYNMcdV1QiTq7AVJI6ViZ0HquHEKXtJWpcCC+WzmvzlkYEV
+UGGcIvlEE+X2kWhbpoMljK4HVVmxhpHs6l+20gVxLEyqsA8dR7BX+CQgz6PcFOTD
+vlXET3RBnCZh2gy2INgoYF42agA6jPPTm/SHHeblYs7c13/4ZUAvO30D5wARAQAB
+tC9NaWtlIERyb2IgKENPREUgU0lHTklORyBLRVkpIDxtZHJvYkBhcGFjaGUub3Jn
+PokBHAQQAQIABgUCVGJ4FwAKCRCOXXo4Pdgxf386CACAG3HsjlEcMETejehPJzKh
+YpZ5DTLVxLYEynlCOOPW2dWss/1kw1DJCMlMW98JRDn9zlxi3BpJ/irU/oZecJ91
+ZC3/STWG08ONH+0aBJFXM8ksdcHbGt1T+ukYGOS1QCmGiloP9/wAbuYo1+5PM0Yk
+CGYJpkk0SkrZ1aVmMllOvOiOtELKKn3pmQi5bQTQFeV9Cmt8cW64+XAp1x2rocPh
+C42o0m4duOokAgDoOBOyIkigC8Gc5yUt9tfNS6k4kBFWxdEg3hzdn63V2VMBYS1M
+VGpkKz/5RpC8kIFkd1d9AVEh2y348SC1WgXpxfkHF8kEMAhHFpH9XzLE2WfmBjEy
+iQIcBBABAgAGBQJSBRMKAAoJECUskCiruopJj3kP/igeSHhSmGbt69G2jIMAXajZ
+NpTOQyRRY9fjrmxoxAqIBB/nOj0tOfuBAD/k5LU5PXRu66fD3BC9fd5iQ+R05IWk
+/e6rRV4nah2WUP3dYtHTbxzWs/CqyMgvphfBty5klJF0VFVxeI3JK/7xzxGWhY1w
+34jNIpo+FiBf5w5RosbKTt1pCtKRW0gsgHf+JF54H532gbFLD+NIveO7Z8o3tsxT
+SbAOYWljVipySxSzSNbByBgy5NrCSsuqJ8QFWMfnGJDtBjK4Icbs7jVm9n8TB4nk
+3ssrJZe+TaQAQW1263L6mFMqN2M6kTTdPECGizExkZX98sfu0uHxPPbi5trxughj
+r1R6PW7TK7FiOWAjwizeXpvLwauXpbXkyXbQeKfA36znAU0vz1Hbq58pfEONKdDS
+e4tMzKPuEaQiMx5hbJyE92wBEwt7ejYes+JQDT8r/rpQpc0XFHDDNbX+p24Ymt+N
+eJ92kn1LPwM6o4Y5nFy4jlGHmYTzO/Y+VyFBZmFSvmnNyhe6gvk6VFN+8lEXtdRY
+7fTv8PsFIfjxEcll9VPd6+XPtv71j/0l05E/zJLSWayt0Ano4sCXv2cqyeQhCoS3
+zIGHx7D58Ukz/smeP5LzOcUhs3C6UYu2JgrP+20O0Zclq1s1eMOMrFq9n6psRSdF
+hYgk8JQyo7VE2h/n50nniQIcBBABAgAGBQJSBRzwAAoJEJqs+1Y1Ks92pqkP/3y8
+yyH4cBwKOX3owcPAkaE137udkhzXofWzjTkvKF8eCto3cqmumzDNxBwF6L383nUO
+WocNCvmhaM6jA/jeVgzouZNrC4fTsqeg9ccbMCs/kP/PIC4BLq5cCn/7pFnJ2Y7p
+Odfs2kX89TyPQTqs9VQb2RL0ENsrz6E1niXNLOQK6nl7qOVuAuK3MAV9e3GhW4yC
+49QFggf3OsrnngAuWTdsp0gOBLAuwzRtp0f06j4rtAakG168mYNIP2FW0hryF79Y
+PkXz9NRn6DiQ4D+XuDMpTE7W4qB7eQU//EaDsqD3K1Cgy8D0CNt3l0HD88ugmdA6
+3w4OY1QFcEE2VtaKVUmyDfhBRZkQoQggYKyVryXIWuowq96LWnV5DO+lZnl9dSP8
+m4yycf6lK0gymLgcE6qEtvKiPJcVJ0SV7lBShVgG4+S5Sl+ppVI7sgE13p5wclMn
+uCiPT8cip/3pmKOQXEtYz+gqe0Mn2fNDhmTP6ULpqLUVqlDZJ2TrarK5/xKHgTFE
+wurWj0U9V0tV9a1Nujh4Ajr2rRMF84KbTVTs6cfZNERbO2Ia6cUgtuG9+iHs5tfU
+8f/2mpa0B6IqvxK2oPgUChQ9m7l60ZSa3evw0SnT5u9nYMllLvjfWGzYtMTbrKmv
+rohYSJwsuWV4pjU7pHFlNVY7WIkMLAH/pwz66ceViQIcBBABCgAGBQJSE91TAAoJ
+EG8M2ucAtomdUr0P/jx6Ui5FiklYxSp45nenBlrOy4hIFCGQE+wxjcUBPr8rPc4v
+whkZRxpd/aTJ4a4zFJqRuIaqccOFME0efmy/N+pW30nTaz3w9uIOQuqKpxotQfw+
+akUyVjZcvBZRdi+e0aZ9+qvrvdHox3558kpvh9lL4vNUuWYr3cgiHRvVzwZtDXXR
+3vA0/jJapGwrgf2IY6ZApfHHXnx0I4sU+lFZG+x6aZ1af4RsBI8krzOGfqYR6cM0
+BpfOlXXEfyx6A20PkxmeG7G4FBWBvIsfbt8Ur4FPGSVs/gc9rf94m3lCflB0SKCH
+l4asOQqNoROVZGvLdfy+3zym4xj+AeZA8ASosAbyOLa5f4rFBNfbYJVbpOMVzn+Q
+IkXP0o9JGypy2HCyQ0hZCMGWBPhT6RauSL6JvxMxiXJFFL0mf3dPIIQlX/uZyJQe
+HQyy7voMcaw6P46ZFMqj60IgWEVwXf2zNmfvdPFRPebeM7VdVqxkWJI8D9r6LhMF
+OcmquzANdrMjjlZmlpHm+1oEXIhOk1UIUhIYdJ3JsnVhxTvoCBGLSKnI4qiQK3/P
+itH3H1qHGOdEWjkVaj0/wg9FeVPvhU29lfkkfU7lu6wg/mi702H1J1pwtXZcSR6L
+eYaUiKFUWXZ7wVJ3fSdEyf8Ll6i5NZFxLZkMzZ4c6sQyfJBU/ofb4yi5991biQI3
+BBMBAgAhAhsDAh4BAheABQJSArHZBQsJCAcDBRUKCQgLBRYCAwEAAAoJED5IwMbv
+NiueULQP/A8bz8JO4svxPZA4LCss4NZX6yWZXBtVPAHGQmqO9HaTf8EoulCnO7qF
+ut2KsJFHJlMsXio46d+g30axFLqMgVPdiaKhV3RvpAXGm7jkVbJwDrIb9BExkidg
+ufemeV/7XI/SS5Opt6wx282wVGMfi+RljKqTno28OaoU2zh1Ro199ZZslr75zPua
+Ue7Ai/r5CqKWt4wwaN4I05G9dFBQ4G5lphKPDaIA6sNxtrLqBZfLHqmJ0OvTZDUF
+8nvDDgj6evcndDJISRu/SPIZJC8d9HzUJo5JKKDlpqRzQqF+52eLMVTwtoM2j5lF
+a3WUSeaCwAu1/khjNSjfNzdINpbpZOQ9dum8BqgR7kHE2bFdwLm3HAbnDfBwWN+p
+ylfZr8RqF+nzpfF+uU3xh6HTqXysS+SUyAmqQPFuBrzSY0u3XI2ez5MzW2EoOCgM
+HPn5Pv5DoyosUTqyu98/lXhmc2/FXbRhjrlppCMfj8orRutQXCk+rx7gtIqeNuLM
+Kpk5C5I501c9zuGLJ8PEfKMc1fu6l660lLwtBicnvFvOnI39DovAJwpxEBncgtiL
+u+utAlJeByz9k3Jig626GM8irP3BleO6anhS2uAL3ycbICkWPv9ml0sB0qJ4gX/a
+MbT3RWpfeBRB50SMsCX52orzxaG8T2KwoerMQHry66le96Lt0s6uuQINBFICr4IB
+EAC9ex37p1brS8P1h5KTcEZqcabV3HwmqjdrggdwV2iDaNlK/YQ0l1WBqWceeGHq

hbase git commit: HBASE-19610 Fixed Checkstyle errors in hbase-protocol and enabled Checkstyle to fail on violations

2017-12-27 Thread janh
Repository: hbase
Updated Branches:
  refs/heads/master 6b39062e8 -> 3ed68fd70


HBASE-19610 Fixed Checkstyle errors in hbase-protocol and enabled Checkstyle to 
fail on violations


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3ed68fd7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3ed68fd7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3ed68fd7

Branch: refs/heads/master
Commit: 3ed68fd70d7ee187553627998ee39581309296e2
Parents: 6b39062
Author: Jan Hentschel 
Authored: Sun Dec 24 13:59:27 2017 +0100
Committer: Jan Hentschel 
Committed: Wed Dec 27 17:26:54 2017 +0100

--
 hbase-protocol/pom.xml  | 16 
 .../google/protobuf/HBaseZeroCopyByteString.java|  2 +-
 .../org/apache/hadoop/hbase/util/ByteStringer.java  |  8 
 3 files changed, 21 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3ed68fd7/hbase-protocol/pom.xml
--
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index d07a031..ec7f66a 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -112,6 +112,22 @@
 
 
   
+  
+org.apache.maven.plugins
+maven-checkstyle-plugin
+
+  
+checkstyle
+validate
+
+  check
+
+
+  true
+
+  
+
+  
 
 
   

http://git-wip-us.apache.org/repos/asf/hbase/blob/3ed68fd7/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java
--
diff --git 
a/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java 
b/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java
index 9d75612..3d1953e 100644
--- 
a/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java
+++ 
b/hbase-protocol/src/main/java/com/google/protobuf/HBaseZeroCopyByteString.java
@@ -61,7 +61,7 @@ public final class HBaseZeroCopyByteString extends 
LiteralByteString {
   /**
* Extracts the byte array from the given {@link ByteString} without copy.
* @param buf A buffer from which to extract the array.  This buffer must be
-   * actually an instance of a {@code LiteralByteString}.
+   *actually an instance of a {@code LiteralByteString}.
* @return byte[] representation
*/
   public static byte[] zeroCopyGetBytes(final ByteString buf) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/3ed68fd7/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java
--
diff --git 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java
index 65f1cc6..581741d 100644
--- 
a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java
+++ 
b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/util/ByteStringer.java
@@ -17,18 +17,18 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import com.google.protobuf.ByteString;
+import com.google.protobuf.HBaseZeroCopyByteString;
+
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.protobuf.ByteString;
-import com.google.protobuf.HBaseZeroCopyByteString;
-
 /**
  * Hack to workaround HBASE-10304 issue that keeps bubbling up when a 
mapreduce context.
  */
 @InterfaceAudience.Private
-public class ByteStringer {
+public final class ByteStringer {
   private static final Logger LOG = 
LoggerFactory.getLogger(ByteStringer.class);
 
   /**



[38/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
index 0001290..ad7ab6b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
@@ -131,7 +131,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class PrivateCellUtil.FirstOnRowByteBufferCell
+private static class PrivateCellUtil.FirstOnRowByteBufferCell
 extends PrivateCellUtil.EmptyByteBufferCell
 
 
@@ -266,7 +266,7 @@ extends PrivateCellUtil.EmptyByteBufferCell
-getFamilyArray,
 getFamilyByteBuffer,
 getFamilyLength,
 getFamilyOffset,
 getFamilyPosition,
 getQualifierArray,
 getQualifierByteBuffer,
 getQualifierLength, getQualifierOffset,
 getQualifierPosition,
 getRowArray,
 getRowOffset,
 getSequenceId,
 getTag,
 getTags,
 getTagsArray, getTagsByteBuffer,
 getTagsLength,
 getTagsOffset,
 getTagsPosition,
 getValueArray,
 getValueByteBuffer,
 getValueLength,
 getValueOffset, getValuePosition,
 setSequenceId,
 setTimestamp,
 setTimestamp
+getFamilyArray,
 getFamilyByteBuffer,
 getFamilyLength,
 getFamilyOffset,
 getFamilyPosition,
 getQualifierArray,
 getQualifierByteBuffer,
 getQualifierLength, getQualifierOffset,
 getQualifierPosition,
 getRowArray,
 getRowOffset,
 getSequenceId,
 getTagsArray,
 getTagsByteBuffer,
 getTagsLength, getTagsOffset,
 getTagsPosition,
 getValueArray,
 getValueByteBuffer,
 getValueLength,
 getValueOffset,
 getValuePosition,
 setSequenceId, setTimestamp,
 setTimestamp
 
 
 
@@ -287,7 +287,7 @@ extends RawCell
-checkForTagsLength,
 cloneTags
+checkForTagsLength,
 cloneTags,
 getTag,
 getTags
 
 
 
@@ -309,7 +309,7 @@ extends 
 
 FIXED_OVERHEAD
-private static finalint FIXED_OVERHEAD
+private static finalint FIXED_OVERHEAD
 
 
 
@@ -318,7 +318,7 @@ extends 
 
 rowBuff
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer rowBuff
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer rowBuff
 
 
 
@@ -327,7 +327,7 @@ extends 
 
 roffset
-private finalint roffset
+private finalint roffset
 
 
 
@@ -336,7 +336,7 @@ extends 
 
 rlength
-private finalshort rlength
+private finalshort rlength
 
 
 
@@ -353,7 +353,7 @@ extends 
 
 FirstOnRowByteBufferCell
-publicFirstOnRowByteBufferCell(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBufferrow,
+publicFirstOnRowByteBufferCell(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBufferrow,
 introffset,
 shortrlength)
 
@@ -372,7 +372,7 @@ extends 
 
 heapSize
-publiclongheapSize()
+publiclongheapSize()
 
 Returns:
 Approximate 'exclusive deep size' of implementing object.  Includes
@@ -386,7 +386,7 @@ extends 
 
 getRowByteBuffer
-publichttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffergetRowByteBuffer()
+publichttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffergetRowByteBuffer()
 
 Overrides:
 getRowByteBufferin
 classPrivateCellUtil.EmptyByteBufferCell
@@ -401,7 +401,7 @@ extends 
 
 getRowPosition
-publicintgetRowPosition()
+publicintgetRowPosition()
 
 Overrides:
 getRowPositionin
 classPrivateCellUtil.EmptyByteBufferCell
@@ -416,7 +416,7 @@ extends 
 
 getRowLength
-publicshortgetRowLength()
+publicshortgetRowLength()
 
 Specified by:
 getRowLengthin
 interfaceCell
@@ -433,7 +433,7 @@ extends 
 
 getTimestamp
-publiclonggetTimestamp()
+publiclonggetTimestamp()
 
 Returns:
 Long value representing time at which this cell was "Put" into the row.  
Typically
@@ -447,7 +447,7 @@ extends 
 
 getTypeByte
-publicbytegetTypeByte()
+publicbytegetTypeByte()
 
 Returns:
 The byte representation of the KeyValue.TYPE of this cell: one of Put, 
Delete, etc
@@ -460,7 +460,7 @@ extends 
 
 getType
-publicCell.DataTypegetType()
+publicCell.DataTypegetType()
 Description copied from 
interface:ExtendedCell
 Returns the type of cell in a human readable format using 
Cell.DataType
  

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowCell.html
--
diff --git 

[06/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferCell.html
index 3400507..2baa140 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSByteBufferCell.html
@@ -28,3034 +28,2926 @@
 020import static 
org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import 
com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import 
org.apache.hadoop.hbase.KeyValue.Type;
-037import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-040import 
org.apache.hadoop.hbase.io.util.Dictionary;
-041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-042import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import 
org.apache.hadoop.hbase.util.ByteRange;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045import 
org.apache.hadoop.hbase.util.ClassSize;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
-051 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this 
class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import 
org.apache.hadoop.hbase.KeyValue.Type;
+034import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import 
org.apache.hadoop.hbase.io.HeapSize;
+036import 
org.apache.hadoop.hbase.io.TagCompressionContext;
+037import 
org.apache.hadoop.hbase.io.util.Dictionary;
+038import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+039import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import 
org.apache.hadoop.hbase.util.ByteRange;
+041import 
org.apache.hadoop.hbase.util.Bytes;
+042import 
org.apache.hadoop.hbase.util.ClassSize;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
+049 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this 
class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange 
***/
 061
-062  /*** ByteRange 
***/
-063
-064  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-065return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-069return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-073return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074  cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-078return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-079  }
-080
-081  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-082return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
+063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange 

[05/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSCell.html
index 3400507..2baa140 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColTSCell.html
@@ -28,3034 +28,2926 @@
 020import static 
org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import 
com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import 
org.apache.hadoop.hbase.KeyValue.Type;
-037import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-040import 
org.apache.hadoop.hbase.io.util.Dictionary;
-041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-042import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import 
org.apache.hadoop.hbase.util.ByteRange;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045import 
org.apache.hadoop.hbase.util.ClassSize;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
-051 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this 
class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import 
org.apache.hadoop.hbase.KeyValue.Type;
+034import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import 
org.apache.hadoop.hbase.io.HeapSize;
+036import 
org.apache.hadoop.hbase.io.TagCompressionContext;
+037import 
org.apache.hadoop.hbase.io.util.Dictionary;
+038import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+039import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import 
org.apache.hadoop.hbase.util.ByteRange;
+041import 
org.apache.hadoop.hbase.util.Bytes;
+042import 
org.apache.hadoop.hbase.util.ClassSize;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
+049 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this 
class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange 
***/
 061
-062  /*** ByteRange 
***/
-063
-064  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-065return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-069return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-073return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074  cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-078return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-079  }
-080
-081  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-082return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
+063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
+067return 

[01/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 4cddebd1e -> d2b28a1a2


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowColByteBufferCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowColByteBufferCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowColByteBufferCell.html
index 3400507..2baa140 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowColByteBufferCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowColByteBufferCell.html
@@ -28,3034 +28,2926 @@
 020import static 
org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import 
com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import 
org.apache.hadoop.hbase.KeyValue.Type;
-037import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-040import 
org.apache.hadoop.hbase.io.util.Dictionary;
-041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-042import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import 
org.apache.hadoop.hbase.util.ByteRange;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045import 
org.apache.hadoop.hbase.util.ClassSize;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
-051 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this 
class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import 
org.apache.hadoop.hbase.KeyValue.Type;
+034import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import 
org.apache.hadoop.hbase.io.HeapSize;
+036import 
org.apache.hadoop.hbase.io.TagCompressionContext;
+037import 
org.apache.hadoop.hbase.io.util.Dictionary;
+038import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+039import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import 
org.apache.hadoop.hbase.util.ByteRange;
+041import 
org.apache.hadoop.hbase.util.Bytes;
+042import 
org.apache.hadoop.hbase.util.ClassSize;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
+049 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this 
class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange 
***/
 061
-062  /*** ByteRange 
***/
-063
-064  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-065return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-069return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-073return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074  cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-078return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-079  }
-080
-081  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-082return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
+063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
+064  }

hbase git commit: HBASE-19629 RawCell#getTags should return the Iterator in order to avoid iterating through whole tag array at once

2017-12-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 7145d9818 -> 467a4667d


HBASE-19629 RawCell#getTags should return the Iterator in order to avoid 
iterating through whole tag array at once

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/467a4667
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/467a4667
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/467a4667

Branch: refs/heads/master
Commit: 467a4667d8417680686fbd754cb52dc52069307a
Parents: 7145d98
Author: Vasudevan 
Authored: Wed Dec 27 21:29:28 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Dec 27 21:33:42 2017 +0800

--
 .../apache/hadoop/hbase/client/Mutation.java|  24 +---
 .../apache/hadoop/hbase/ByteBufferKeyValue.java |  34 --
 .../hadoop/hbase/IndividualBytesFieldCell.java  |  33 --
 .../java/org/apache/hadoop/hbase/KeyValue.java  |  37 +-
 .../apache/hadoop/hbase/PrivateCellUtil.java| 116 +--
 .../java/org/apache/hadoop/hbase/RawCell.java   |  10 +-
 .../io/encoding/BufferedDataBlockEncoder.java   |  62 --
 .../hadoop/hbase/TestByteBufferKeyValue.java|   2 +-
 .../org/apache/hadoop/hbase/TestKeyValue.java   |   2 +-
 .../hbase/codec/TestCellCodecWithTags.java  |   9 +-
 .../hbase/codec/TestKeyValueCodecWithTags.java  |   9 +-
 .../apache/hadoop/hbase/util/MapReduceCell.java |  34 --
 .../hbase/mapreduce/TestHFileOutputFormat2.java |   4 +-
 .../hbase/io/hfile/HFilePrettyPrinter.java  |   3 +-
 .../TestStoreFileScannerWithTagCompression.java |   3 +-
 .../hadoop/hbase/regionserver/TestTags.java |   4 +-
 .../wal/TestWALCellCodecWithCompression.java|   7 +-
 .../replication/TestReplicationWithTags.java|   4 +-
 .../TestVisibilityLabelsReplication.java|   9 +-
 19 files changed, 46 insertions(+), 360 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/467a4667/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
index 9472d70..8a29574 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hbase.client;
 
-import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -33,7 +31,6 @@ import java.util.Optional;
 import java.util.TreeMap;
 import java.util.UUID;
 import java.util.stream.Collectors;
-import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.CellScanner;
@@ -60,7 +57,6 @@ import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
 import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
 import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
@@ -915,35 +911,23 @@ public abstract class Mutation extends 
OperationWithAttributes implements Row, C
   if (cell instanceof RawCell) {
 return ((RawCell) cell).getTag(type);
   }
-  int length = getTagsLength();
-  int offset = getTagsOffset();
-  int pos = offset;
-  while (pos < offset + length) {
-int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE);
-if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
-  return Optional.of(new ArrayBackedTag(getTagsArray(), pos,
-tagLen + TAG_LENGTH_SIZE));
-}
-pos += TAG_LENGTH_SIZE + tagLen;
-  }
-  return Optional.empty();
+  return PrivateCellUtil.getTag(cell, type);
 }
 
 @Override
-public List getTags() {
+public Iterator getTags() {
   if (cell instanceof RawCell) {
 return ((RawCell) cell).getTags();
   }
-  return Lists.newArrayList(PrivateCellUtil.tagsIterator(cell));
+  return PrivateCellUtil.tagsIterator(cell);
 }
 
 @Override
 public byte[] cloneTags() {
   if (cell instanceof RawCell) {
 return ((RawCell) cell).cloneTags();
-  } 

hbase git commit: HBASE-19629 RawCell#getTags should return the Iterator in order to avoid iterating through whole tag array at once

2017-12-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 15ed74828 -> 0454a389a


HBASE-19629 RawCell#getTags should return the Iterator in order to avoid 
iterating through whole tag array at once

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0454a389
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0454a389
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0454a389

Branch: refs/heads/branch-2
Commit: 0454a389a378675bf8c63e8f9f1ff1fd086962a7
Parents: 15ed748
Author: Vasudevan 
Authored: Wed Dec 27 21:29:28 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Dec 27 21:30:08 2017 +0800

--
 .../apache/hadoop/hbase/client/Mutation.java|  24 +---
 .../apache/hadoop/hbase/ByteBufferKeyValue.java |  34 --
 .../hadoop/hbase/IndividualBytesFieldCell.java  |  33 --
 .../java/org/apache/hadoop/hbase/KeyValue.java  |  37 +-
 .../apache/hadoop/hbase/PrivateCellUtil.java| 116 +--
 .../java/org/apache/hadoop/hbase/RawCell.java   |  10 +-
 .../io/encoding/BufferedDataBlockEncoder.java   |  62 --
 .../hadoop/hbase/TestByteBufferKeyValue.java|   2 +-
 .../org/apache/hadoop/hbase/TestKeyValue.java   |   2 +-
 .../hbase/codec/TestCellCodecWithTags.java  |   9 +-
 .../hbase/codec/TestKeyValueCodecWithTags.java  |   9 +-
 .../apache/hadoop/hbase/util/MapReduceCell.java |  34 --
 .../hbase/mapreduce/TestHFileOutputFormat2.java |   4 +-
 .../hbase/io/hfile/HFilePrettyPrinter.java  |   3 +-
 .../TestStoreFileScannerWithTagCompression.java |   3 +-
 .../hadoop/hbase/regionserver/TestTags.java |   4 +-
 .../wal/TestWALCellCodecWithCompression.java|   7 +-
 .../replication/TestReplicationWithTags.java|   4 +-
 .../TestVisibilityLabelsReplication.java|   9 +-
 19 files changed, 46 insertions(+), 360 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0454a389/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
index 9472d70..8a29574 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hbase.client;
 
-import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -33,7 +31,6 @@ import java.util.Optional;
 import java.util.TreeMap;
 import java.util.UUID;
 import java.util.stream.Collectors;
-import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.CellScanner;
@@ -60,7 +57,6 @@ import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
 import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
 import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
@@ -915,35 +911,23 @@ public abstract class Mutation extends 
OperationWithAttributes implements Row, C
   if (cell instanceof RawCell) {
 return ((RawCell) cell).getTag(type);
   }
-  int length = getTagsLength();
-  int offset = getTagsOffset();
-  int pos = offset;
-  while (pos < offset + length) {
-int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE);
-if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
-  return Optional.of(new ArrayBackedTag(getTagsArray(), pos,
-tagLen + TAG_LENGTH_SIZE));
-}
-pos += TAG_LENGTH_SIZE + tagLen;
-  }
-  return Optional.empty();
+  return PrivateCellUtil.getTag(cell, type);
 }
 
 @Override
-public List getTags() {
+public Iterator getTags() {
   if (cell instanceof RawCell) {
 return ((RawCell) cell).getTags();
   }
-  return Lists.newArrayList(PrivateCellUtil.tagsIterator(cell));
+  return PrivateCellUtil.tagsIterator(cell);
 }
 
 @Override
 public byte[] cloneTags() {
   if (cell instanceof RawCell) {
 return ((RawCell) cell).cloneTags();
-  } 

[04/22] hbase git commit: HBASE-19619 Modify replication_admin.rb to use ReplicationPeerConfigBuilder

2017-12-27 Thread zhangduo
HBASE-19619 Modify replication_admin.rb to use ReplicationPeerConfigBuilder


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/41c2dd04
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/41c2dd04
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/41c2dd04

Branch: refs/heads/HBASE-19397
Commit: 41c2dd04da21bb76208f04af104df2e2f444970d
Parents: 3317b87
Author: tedyu 
Authored: Wed Dec 27 02:35:41 2017 -0800
Committer: tedyu 
Committed: Wed Dec 27 02:35:41 2017 -0800

--
 .../src/main/ruby/hbase/replication_admin.rb| 51 
 1 file changed, 30 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/41c2dd04/hbase-shell/src/main/ruby/hbase/replication_admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/replication_admin.rb 
b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
index f80c547..b9d4a0c 100644
--- a/hbase-shell/src/main/ruby/hbase/replication_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
@@ -66,22 +66,22 @@ module Hbase
 peer_state = args.fetch(STATE, nil)
 
 # Create and populate a ReplicationPeerConfig
-replication_peer_config = ReplicationPeerConfig.new
-replication_peer_config.set_cluster_key(cluster_key)
+builder = org.apache.hadoop.hbase.replication.ReplicationPeerConfig
+  .newBuilder()
+builder.set_cluster_key(cluster_key)
 
 unless endpoint_classname.nil?
-  
replication_peer_config.set_replication_endpoint_impl(endpoint_classname)
+  builder.set_replication_endpoint_impl(endpoint_classname)
 end
 
 unless config.nil?
-  replication_peer_config.get_configuration.put_all(config)
+  builder.putAllConfiguration(config)
 end
 
 unless data.nil?
   # Convert Strings to Bytes for peer_data
-  peer_data = replication_peer_config.get_peer_data
   data.each do |key, val|
-peer_data.put(Bytes.to_bytes(key), Bytes.to_bytes(val))
+builder.putPeerData(Bytes.to_bytes(key), Bytes.to_bytes(val))
   end
 end
 
@@ -90,8 +90,8 @@ module Hbase
   namespaces.each do |n|
 ns_set.add(n)
   end
-  replication_peer_config.setReplicateAllUserTables(false)
-  replication_peer_config.set_namespaces(ns_set)
+  builder.setReplicateAllUserTables(false)
+  builder.set_namespaces(ns_set)
 end
 
 unless table_cfs.nil?
@@ -100,15 +100,15 @@ module Hbase
   table_cfs.each do |key, val|
 map.put(org.apache.hadoop.hbase.TableName.valueOf(key), val)
   end
-  replication_peer_config.setReplicateAllUserTables(false)
-  replication_peer_config.set_table_cfs_map(map)
+  builder.setReplicateAllUserTables(false)
+  builder.set_table_cfs_map(map)
 end
 
 enabled = true
 unless peer_state.nil?
   enabled = false if peer_state == 'DISABLED'
 end
-@admin.addReplicationPeer(id, replication_peer_config, enabled)
+@admin.addReplicationPeer(id, builder.build, enabled)
   else
 raise(ArgumentError, 'args must be a Hash')
   end
@@ -220,13 +220,18 @@ module Hbase
   unless namespaces.nil?
 rpc = get_peer_config(id)
 unless rpc.nil?
-  ns_set = rpc.getNamespaces
-  ns_set = java.util.HashSet.new if ns_set.nil?
+  if rpc.getNamespaces.nil?
+ns_set = java.util.HashSet.new
+  else
+ns_set = java.util.HashSet.new(rpc.getNamespaces)
+  end
   namespaces.each do |n|
 ns_set.add(n)
   end
-  rpc.setNamespaces(ns_set)
-  @admin.updateReplicationPeerConfig(id, rpc)
+  builder = org.apache.hadoop.hbase.replication.ReplicationPeerConfig
+.newBuilder(rpc)
+  builder.setNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
 end
   end
 end
@@ -238,12 +243,15 @@ module Hbase
 unless rpc.nil?
   ns_set = rpc.getNamespaces
   unless ns_set.nil?
+ns_set = java.util.HashSet.new(ns_set)
 namespaces.each do |n|
   ns_set.remove(n)
 end
   end
-  rpc.setNamespaces(ns_set)
-  @admin.updateReplicationPeerConfig(id, rpc)
+  builder = org.apache.hadoop.hbase.replication.ReplicationPeerConfig
+.newBuilder(rpc)
+  builder.setNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
 end
   end
 end
@@ -353,19 +361,20 

[10/22] hbase git commit: HBASE-19543 Abstract a replication storage interface to extract the zk specific code

2017-12-27 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/f8d80fd0/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationManager.java
deleted file mode 100644
index b6f8784..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationManager.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.master.replication;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.regex.Pattern;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Abortable;
-import org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.replication.BaseReplicationEndpoint;
-import org.apache.hadoop.hbase.replication.ReplicationException;
-import org.apache.hadoop.hbase.replication.ReplicationFactory;
-import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-import org.apache.hadoop.hbase.replication.ReplicationPeers;
-import org.apache.hadoop.hbase.replication.ReplicationQueuesClient;
-import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments;
-import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
-import org.apache.yetus.audience.InterfaceAudience;
-
-/**
- * Manages and performs all replication admin operations.
- * 
- * Used to add/remove a replication peer.
- */
-@InterfaceAudience.Private
-public class ReplicationManager {
-  private final ReplicationQueuesClient replicationQueuesClient;
-  private final ReplicationPeers replicationPeers;
-
-  public ReplicationManager(Configuration conf, ZKWatcher zkw, Abortable 
abortable)
-  throws IOException {
-try {
-  this.replicationQueuesClient = ReplicationFactory
-  .getReplicationQueuesClient(new 
ReplicationQueuesClientArguments(conf, abortable, zkw));
-  this.replicationQueuesClient.init();
-  this.replicationPeers = ReplicationFactory.getReplicationPeers(zkw, conf,
-this.replicationQueuesClient, abortable);
-  this.replicationPeers.init();
-} catch (Exception e) {
-  throw new IOException("Failed to construct ReplicationManager", e);
-}
-  }
-
-  public void addReplicationPeer(String peerId, ReplicationPeerConfig 
peerConfig, boolean enabled)
-  throws ReplicationException {
-checkPeerConfig(peerConfig);
-replicationPeers.registerPeer(peerId, peerConfig, enabled);
-replicationPeers.peerConnected(peerId);
-  }
-
-  public void removeReplicationPeer(String peerId) throws ReplicationException 
{
-replicationPeers.peerDisconnected(peerId);
-replicationPeers.unregisterPeer(peerId);
-  }
-
-  public void enableReplicationPeer(String peerId) throws ReplicationException 
{
-this.replicationPeers.enablePeer(peerId);
-  }
-
-  public void disableReplicationPeer(String peerId) throws 
ReplicationException {
-this.replicationPeers.disablePeer(peerId);
-  }
-
-  public ReplicationPeerConfig getPeerConfig(String peerId)
-  throws ReplicationException, ReplicationPeerNotFoundException {
-ReplicationPeerConfig peerConfig = 
replicationPeers.getReplicationPeerConfig(peerId);
-if (peerConfig == null) {
-  throw new ReplicationPeerNotFoundException(peerId);
-}
-return peerConfig;
-  }
-
-  public void updatePeerConfig(String peerId, ReplicationPeerConfig peerConfig)
-  throws ReplicationException, IOException {
-checkPeerConfig(peerConfig);
-this.replicationPeers.updatePeerConfig(peerId, peerConfig);
-  }
-
-  public List listReplicationPeers(Pattern pattern)
-  throws ReplicationException {
-List peers = new ArrayList<>();
-List peerIds = replicationPeers.getAllPeerIds();
-for (String peerId : 

[22/22] hbase git commit: HBASE-19642 Fix locking for peer modification procedure

2017-12-27 Thread zhangduo
HBASE-19642 Fix locking for peer modification procedure


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/258114e5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/258114e5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/258114e5

Branch: refs/heads/HBASE-19397
Commit: 258114e5ca67b0e08c10e4261d57c8e9c3527b26
Parents: 044385e
Author: zhangduo 
Authored: Wed Dec 27 18:27:13 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:47 2017 +0800

--
 .../procedure/MasterProcedureScheduler.java | 14 +
 .../master/replication/ModifyPeerProcedure.java | 21 +---
 2 files changed, 32 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/258114e5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
index 5f4665c..05c8439 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
@@ -610,6 +610,20 @@ public class MasterProcedureScheduler extends 
AbstractProcedureScheduler {
 public boolean requireExclusiveLock(Procedure proc) {
   return requirePeerExclusiveLock((PeerProcedureInterface) proc);
 }
+
+@Override
+public boolean isAvailable() {
+  if (isEmpty()) {
+return false;
+  }
+  if (getLockStatus().hasExclusiveLock()) {
+// if we have an exclusive lock already taken
+// only child of the lock owner can be executed
+Procedure nextProc = peek();
+return nextProc != null && getLockStatus().hasLockAccess(nextProc);
+  }
+  return true;
+}
   }
 
   // 


http://git-wip-us.apache.org/repos/asf/hbase/blob/258114e5/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ModifyPeerProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ModifyPeerProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ModifyPeerProcedure.java
index 279fbc7..a682606 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ModifyPeerProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ModifyPeerProcedure.java
@@ -46,6 +46,8 @@ public abstract class ModifyPeerProcedure
 
   protected String peerId;
 
+  private volatile boolean locked;
+
   // used to keep compatible with old client where we can only returns after 
updateStorage.
   protected ProcedurePrepareLatch latch;
 
@@ -145,17 +147,30 @@ public abstract class ModifyPeerProcedure
 
   @Override
   protected LockState acquireLock(MasterProcedureEnv env) {
-return env.getProcedureScheduler().waitPeerExclusiveLock(this, peerId)
-  ? LockState.LOCK_EVENT_WAIT
-  : LockState.LOCK_ACQUIRED;
+if (env.getProcedureScheduler().waitPeerExclusiveLock(this, peerId)) {
+  return  LockState.LOCK_EVENT_WAIT;
+}
+locked = true;
+return LockState.LOCK_ACQUIRED;
   }
 
   @Override
   protected void releaseLock(MasterProcedureEnv env) {
+locked = false;
 env.getProcedureScheduler().wakePeerExclusiveLock(this, peerId);
   }
 
   @Override
+  protected boolean holdLock(MasterProcedureEnv env) {
+return true;
+  }
+
+  @Override
+  protected boolean hasLock(MasterProcedureEnv env) {
+return locked;
+  }
+
+  @Override
   protected void rollbackState(MasterProcedureEnv env, PeerModificationState 
state)
   throws IOException, InterruptedException {
 if (state == PeerModificationState.PRE_PEER_MODIFICATION) {



[12/22] hbase git commit: HBASE-19524 Master side changes for moving peer modification from zk watcher to procedure

2017-12-27 Thread zhangduo
HBASE-19524 Master side changes for moving peer modification from zk watcher to 
procedure


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eb56ba18
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eb56ba18
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eb56ba18

Branch: refs/heads/HBASE-19397
Commit: eb56ba188fee45de52581d09f15362f5279766a3
Parents: 4811479
Author: zhangduo 
Authored: Mon Dec 18 15:22:36 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:17 2017 +0800

--
 .../procedure2/RemoteProcedureDispatcher.java   |   3 +-
 .../src/main/protobuf/MasterProcedure.proto |  21 +++-
 .../src/main/protobuf/RegionServerStatus.proto  |   3 +-
 .../src/main/protobuf/Replication.proto |   5 +
 .../replication/ReplicationPeersZKImpl.java |   4 +-
 .../org/apache/hadoop/hbase/master/HMaster.java | 100 ---
 .../hadoop/hbase/master/MasterRpcServices.java  |   4 +-
 .../hadoop/hbase/master/MasterServices.java |  26 +++--
 .../assignment/RegionTransitionProcedure.java   |  11 +-
 .../master/procedure/MasterProcedureEnv.java|   5 +
 .../master/procedure/ProcedurePrepareLatch.java |   2 +-
 .../master/replication/AddPeerProcedure.java|  97 ++
 .../replication/DisablePeerProcedure.java   |  70 +
 .../master/replication/EnablePeerProcedure.java |  69 +
 .../master/replication/ModifyPeerProcedure.java |  97 +++---
 .../master/replication/RefreshPeerCallable.java |  67 -
 .../replication/RefreshPeerProcedure.java   |  28 --
 .../master/replication/RemovePeerProcedure.java |  69 +
 .../master/replication/ReplicationManager.java  |  76 +++---
 .../replication/UpdatePeerConfigProcedure.java  |  92 +
 .../hbase/regionserver/HRegionServer.java   |   6 +-
 .../regionserver/RefreshPeerCallable.java   |  70 +
 .../hbase/master/MockNoopMasterServices.java|  23 +++--
 .../replication/DummyModifyPeerProcedure.java   |  13 ++-
 24 files changed, 736 insertions(+), 225 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eb56ba18/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
index e9a6906..1235b33 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
@@ -247,9 +247,8 @@ public abstract class RemoteProcedureDispatcher

[01/22] hbase git commit: HBASE-19624 TestIOFencing hangs [Forced Update!]

2017-12-27 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/HBASE-19397 350f5d126 -> 258114e5c (forced update)


HBASE-19624 TestIOFencing hangs


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0e7fff58
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0e7fff58
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0e7fff58

Branch: refs/heads/HBASE-19397
Commit: 0e7fff58292122512a4aa444a53c71608e9a5bce
Parents: 5a561e0
Author: Chia-Ping Tsai 
Authored: Wed Dec 27 09:33:53 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Dec 27 09:33:53 2017 +0800

--
 .../java/org/apache/hadoop/hbase/regionserver/CompactSplit.java | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0e7fff58/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
index e143511..28fc1a3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
@@ -397,6 +397,7 @@ public class CompactSplit implements CompactionRequester, 
PropagatingConfigurati
 }
   } catch (InterruptedException ie) {
 LOG.warn("Interrupted waiting for " + name + " to finish...");
+t.shutdownNow();
   }
 }
   }



[02/22] hbase git commit: HBASE-19615 CompositeImmutableSegment ArrayList Instead of LinkedList

2017-12-27 Thread zhangduo
HBASE-19615 CompositeImmutableSegment ArrayList Instead of LinkedList

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0e85a880
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0e85a880
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0e85a880

Branch: refs/heads/HBASE-19397
Commit: 0e85a880fb174b7fa221e24bc1f6ea32a98acc62
Parents: 0e7fff5
Author: BELUGA BEHR 
Authored: Wed Dec 27 09:59:38 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Dec 27 10:04:53 2017 +0800

--
 .../hadoop/hbase/regionserver/CompositeImmutableSegment.java  | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0e85a880/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
index 5e8a8b3..bf9ff13 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.regionserver;
 
 import java.util.ArrayList;
 import java.util.Iterator;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.SortedSet;
 
@@ -56,7 +55,7 @@ public class CompositeImmutableSegment extends 
ImmutableSegment {
   @VisibleForTesting
   @Override
   public List getAllSegments() {
-return new LinkedList<>(segments);
+return new ArrayList<>(segments);
   }
 
   @Override



[18/22] hbase git commit: HBASE-19573 Rewrite ReplicationPeer with the new replication storage interface

2017-12-27 Thread zhangduo
HBASE-19573 Rewrite ReplicationPeer with the new replication storage interface


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9a411099
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9a411099
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9a411099

Branch: refs/heads/HBASE-19397
Commit: 9a411099cd1342195347fa60a0afe7213ca1eada
Parents: 6facf2a
Author: Guanghao Zhang 
Authored: Tue Dec 26 11:39:34 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:17 2017 +0800

--
 .../replication/VerifyReplication.java  |   5 -
 .../hbase/replication/ReplicationPeer.java  |  42 ++--
 .../hbase/replication/ReplicationPeerImpl.java  | 170 ++
 .../replication/ReplicationPeerZKImpl.java  | 233 ---
 .../hbase/replication/ReplicationPeers.java |   4 +-
 .../replication/ReplicationPeersZKImpl.java |  23 +-
 .../replication/TestReplicationStateBasic.java  |   7 +-
 .../regionserver/PeerProcedureHandlerImpl.java  |  29 +--
 8 files changed, 217 insertions(+), 296 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9a411099/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
--
diff --git 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
index 01df2bd..da231e6 100644
--- 
a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
+++ 
b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
@@ -49,7 +49,6 @@ import org.apache.hadoop.hbase.mapreduce.TableSplit;
 import org.apache.hadoop.hbase.replication.ReplicationException;
 import org.apache.hadoop.hbase.replication.ReplicationFactory;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-import org.apache.hadoop.hbase.replication.ReplicationPeerZKImpl;
 import org.apache.hadoop.hbase.replication.ReplicationPeers;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -330,7 +329,6 @@ public class VerifyReplication extends Configured 
implements Tool {
   private static Pair 
getPeerQuorumConfig(
   final Configuration conf, String peerId) throws IOException {
 ZKWatcher localZKW = null;
-ReplicationPeerZKImpl peer = null;
 try {
   localZKW = new ZKWatcher(conf, "VerifyReplication",
   new Abortable() {
@@ -351,9 +349,6 @@ public class VerifyReplication extends Configured 
implements Tool {
   throw new IOException(
   "An error occurred while trying to connect to the remove peer 
cluster", e);
 } finally {
-  if (peer != null) {
-peer.close();
-  }
   if (localZKW != null) {
 localZKW.close();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/9a411099/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
index b66d76d..4846018 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeer.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.yetus.audience.InterfaceAudience;
 
-
 /**
  * ReplicationPeer manages enabled / disabled state for the peer.
  */
@@ -49,65 +48,52 @@ public interface ReplicationPeer {
   String getId();
 
   /**
-   * Get the peer config object
-   * @return the ReplicationPeerConfig for this peer
-   */
-  public ReplicationPeerConfig getPeerConfig();
-
-  /**
-   * Get the peer config object. if loadFromBackingStore is true, it will load 
from backing store
-   * directly and update its load peer config. otherwise, just return the 
local cached peer config.
-   * @return the ReplicationPeerConfig for this peer
-   */
-  public ReplicationPeerConfig getPeerConfig(boolean loadFromBackingStore)
-  throws ReplicationException;
-
-  /**
* Returns the state of the peer by reading local cache.
* @return the enabled state
*/
   PeerState getPeerState();
 
   /**
-   * Returns the state of peer, if loadFromBackingStore is true, it will load 
from backing store
-   * directly and 

[16/22] hbase git commit: HBASE-19579 Add peer lock test for shell command list_locks

2017-12-27 Thread zhangduo
HBASE-19579 Add peer lock test for shell command list_locks

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6facf2a7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6facf2a7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6facf2a7

Branch: refs/heads/HBASE-19397
Commit: 6facf2a76de3c246bdf3db7c79b0a12a51dc9ed1
Parents: 0d6b613
Author: Guanghao Zhang 
Authored: Sat Dec 23 21:04:27 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:17 2017 +0800

--
 .../src/main/protobuf/LockService.proto  |  1 +
 .../src/test/ruby/shell/list_locks_test.rb   | 19 +++
 2 files changed, 20 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6facf2a7/hbase-protocol-shaded/src/main/protobuf/LockService.proto
--
diff --git a/hbase-protocol-shaded/src/main/protobuf/LockService.proto 
b/hbase-protocol-shaded/src/main/protobuf/LockService.proto
index b8d180c..0675070 100644
--- a/hbase-protocol-shaded/src/main/protobuf/LockService.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/LockService.proto
@@ -77,6 +77,7 @@ enum LockedResourceType {
   NAMESPACE = 2;
   TABLE = 3;
   REGION = 4;
+  PEER = 5;
 }
 
 message LockedResource {

http://git-wip-us.apache.org/repos/asf/hbase/blob/6facf2a7/hbase-shell/src/test/ruby/shell/list_locks_test.rb
--
diff --git a/hbase-shell/src/test/ruby/shell/list_locks_test.rb 
b/hbase-shell/src/test/ruby/shell/list_locks_test.rb
index f465a6b..ef1c0ce 100644
--- a/hbase-shell/src/test/ruby/shell/list_locks_test.rb
+++ b/hbase-shell/src/test/ruby/shell/list_locks_test.rb
@@ -67,6 +67,25 @@ module Hbase
 proc_id)
 end
 
+define_test 'list peer locks' do
+  lock = create_exclusive_lock(0)
+  peer_id = '1'
+
+  @scheduler.waitPeerExclusiveLock(lock, peer_id)
+  output = capture_stdout { @list_locks.command }
+  @scheduler.wakePeerExclusiveLock(lock, peer_id)
+
+  assert_equal(
+"PEER(1)\n" \
+"Lock type: EXCLUSIVE, procedure: {" \
+  
"\"className\"=>\"org.apache.hadoop.hbase.master.locking.LockProcedure\", " \
+  "\"procId\"=>\"0\", \"submittedTime\"=>\"0\", 
\"state\"=>\"RUNNABLE\", " \
+  "\"lastUpdate\"=>\"0\", " \
+  "\"stateMessage\"=>[{\"lockType\"=>\"EXCLUSIVE\", 
\"description\"=>\"description\"}]" \
+"}\n\n",
+output)
+end
+
 define_test 'list server locks' do
   lock = create_exclusive_lock(0)
 



[06/22] hbase git commit: HBASE-19629 RawCell#getTags should return the Iterator in order to avoid iterating through whole tag array at once

2017-12-27 Thread zhangduo
HBASE-19629 RawCell#getTags should return the Iterator in order to avoid 
iterating through whole tag array at once

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/467a4667
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/467a4667
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/467a4667

Branch: refs/heads/HBASE-19397
Commit: 467a4667d8417680686fbd754cb52dc52069307a
Parents: 7145d98
Author: Vasudevan 
Authored: Wed Dec 27 21:29:28 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Dec 27 21:33:42 2017 +0800

--
 .../apache/hadoop/hbase/client/Mutation.java|  24 +---
 .../apache/hadoop/hbase/ByteBufferKeyValue.java |  34 --
 .../hadoop/hbase/IndividualBytesFieldCell.java  |  33 --
 .../java/org/apache/hadoop/hbase/KeyValue.java  |  37 +-
 .../apache/hadoop/hbase/PrivateCellUtil.java| 116 +--
 .../java/org/apache/hadoop/hbase/RawCell.java   |  10 +-
 .../io/encoding/BufferedDataBlockEncoder.java   |  62 --
 .../hadoop/hbase/TestByteBufferKeyValue.java|   2 +-
 .../org/apache/hadoop/hbase/TestKeyValue.java   |   2 +-
 .../hbase/codec/TestCellCodecWithTags.java  |   9 +-
 .../hbase/codec/TestKeyValueCodecWithTags.java  |   9 +-
 .../apache/hadoop/hbase/util/MapReduceCell.java |  34 --
 .../hbase/mapreduce/TestHFileOutputFormat2.java |   4 +-
 .../hbase/io/hfile/HFilePrettyPrinter.java  |   3 +-
 .../TestStoreFileScannerWithTagCompression.java |   3 +-
 .../hadoop/hbase/regionserver/TestTags.java |   4 +-
 .../wal/TestWALCellCodecWithCompression.java|   7 +-
 .../replication/TestReplicationWithTags.java|   4 +-
 .../TestVisibilityLabelsReplication.java|   9 +-
 19 files changed, 46 insertions(+), 360 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/467a4667/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
index 9472d70..8a29574 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hbase.client;
 
-import static org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -33,7 +31,6 @@ import java.util.Optional;
 import java.util.TreeMap;
 import java.util.UUID;
 import java.util.stream.Collectors;
-import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.CellScanner;
@@ -60,7 +57,6 @@ import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
 import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
 import org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
@@ -915,35 +911,23 @@ public abstract class Mutation extends 
OperationWithAttributes implements Row, C
   if (cell instanceof RawCell) {
 return ((RawCell) cell).getTag(type);
   }
-  int length = getTagsLength();
-  int offset = getTagsOffset();
-  int pos = offset;
-  while (pos < offset + length) {
-int tagLen = Bytes.readAsInt(getTagsArray(), pos, TAG_LENGTH_SIZE);
-if (getTagsArray()[pos + TAG_LENGTH_SIZE] == type) {
-  return Optional.of(new ArrayBackedTag(getTagsArray(), pos,
-tagLen + TAG_LENGTH_SIZE));
-}
-pos += TAG_LENGTH_SIZE + tagLen;
-  }
-  return Optional.empty();
+  return PrivateCellUtil.getTag(cell, type);
 }
 
 @Override
-public List getTags() {
+public Iterator getTags() {
   if (cell instanceof RawCell) {
 return ((RawCell) cell).getTags();
   }
-  return Lists.newArrayList(PrivateCellUtil.tagsIterator(cell));
+  return PrivateCellUtil.tagsIterator(cell);
 }
 
 @Override
 public byte[] cloneTags() {
   if (cell instanceof RawCell) {
 return ((RawCell) cell).cloneTags();
-  } else {
-return PrivateCellUtil.cloneTags(cell);
   }
+  return 

[11/22] hbase git commit: HBASE-19543 Abstract a replication storage interface to extract the zk specific code

2017-12-27 Thread zhangduo
HBASE-19543 Abstract a replication storage interface to extract the zk specific 
code


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f8d80fd0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f8d80fd0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f8d80fd0

Branch: refs/heads/HBASE-19397
Commit: f8d80fd020701dc25f76de5d953e28bd2d4dd9e1
Parents: ebdf261
Author: zhangduo 
Authored: Fri Dec 22 14:37:28 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:17 2017 +0800

--
 .../hadoop/hbase/util/CollectionUtils.java  |   3 +
 hbase-replication/pom.xml   |  12 +
 .../replication/ReplicationPeerStorage.java |  74 
 .../replication/ReplicationQueueStorage.java| 164 +++
 .../replication/ReplicationStateZKBase.java |   1 -
 .../replication/ReplicationStorageFactory.java  |  49 +++
 .../replication/ZKReplicationPeerStorage.java   | 164 +++
 .../replication/ZKReplicationQueueStorage.java  | 425 +++
 .../replication/ZKReplicationStorageBase.java   |  75 
 .../TestZKReplicationPeerStorage.java   | 171 
 .../TestZKReplicationQueueStorage.java  | 171 
 .../org/apache/hadoop/hbase/master/HMaster.java |  36 +-
 .../hadoop/hbase/master/MasterServices.java |   6 +-
 .../master/procedure/MasterProcedureEnv.java|  24 +-
 .../master/replication/AddPeerProcedure.java|   6 +-
 .../replication/DisablePeerProcedure.java   |   7 +-
 .../master/replication/EnablePeerProcedure.java |   6 +-
 .../master/replication/ModifyPeerProcedure.java |  41 +-
 .../master/replication/RemovePeerProcedure.java |   6 +-
 .../master/replication/ReplicationManager.java  | 199 -
 .../replication/ReplicationPeerManager.java | 331 +++
 .../replication/UpdatePeerConfigProcedure.java  |   7 +-
 .../replication/TestReplicationAdmin.java   |  62 ++-
 .../hbase/master/MockNoopMasterServices.java|  10 +-
 .../hbase/master/TestMasterNoCluster.java   |   4 +-
 .../TestReplicationDisableInactivePeer.java |   6 +-
 26 files changed, 1749 insertions(+), 311 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f8d80fd0/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
index 875b124..8bbb6f1 100644
--- 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
+++ 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/CollectionUtils.java
@@ -107,6 +107,9 @@ public class CollectionUtils {
 return list.get(list.size() - 1);
   }
 
+  public static  List nullToEmpty(List list) {
+return list != null ? list : Collections.emptyList();
+  }
   /**
* In HBASE-16648 we found that ConcurrentHashMap.get is much faster than 
computeIfAbsent if the
* value already exists. Notice that the implementation does not guarantee 
that the supplier will

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8d80fd0/hbase-replication/pom.xml
--
diff --git a/hbase-replication/pom.xml b/hbase-replication/pom.xml
index ab22199..4e3cea0 100644
--- a/hbase-replication/pom.xml
+++ b/hbase-replication/pom.xml
@@ -121,6 +121,18 @@
   org.apache.hbase
   hbase-zookeeper
 
+
+  org.apache.hbase
+  hbase-common
+  test-jar
+  test
+
+
+  org.apache.hbase
+  hbase-zookeeper
+  test-jar
+  test
+
 
 
   org.apache.commons

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8d80fd0/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerStorage.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerStorage.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerStorage.java
new file mode 100644
index 000..e00cd0d
--- /dev/null
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeerStorage.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain 

[20/22] hbase git commit: HBASE-19580 Use slf4j instead of commons-logging in new, just-added Peer Procedure classes

2017-12-27 Thread zhangduo
HBASE-19580 Use slf4j instead of commons-logging in new, just-added Peer 
Procedure classes


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bb9dc5b9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bb9dc5b9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bb9dc5b9

Branch: refs/heads/HBASE-19397
Commit: bb9dc5b9e0857cb47de1db6c35f3aeaed7513409
Parents: 97ede8c
Author: zhangduo 
Authored: Thu Dec 21 21:59:46 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:17 2017 +0800

--
 .../hadoop/hbase/master/replication/AddPeerProcedure.java  | 6 +++---
 .../hadoop/hbase/master/replication/DisablePeerProcedure.java  | 6 +++---
 .../hadoop/hbase/master/replication/EnablePeerProcedure.java   | 6 +++---
 .../hadoop/hbase/master/replication/ModifyPeerProcedure.java   | 6 +++---
 .../hadoop/hbase/master/replication/RefreshPeerProcedure.java  | 6 +++---
 .../hadoop/hbase/master/replication/RemovePeerProcedure.java   | 6 +++---
 .../hbase/master/replication/UpdatePeerConfigProcedure.java| 6 +++---
 7 files changed, 21 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bb9dc5b9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AddPeerProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AddPeerProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AddPeerProcedure.java
index c3862d8..066c3e7 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AddPeerProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/AddPeerProcedure.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.master.replication;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
@@ -28,6 +26,8 @@ import 
org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer;
 import org.apache.hadoop.hbase.replication.ReplicationException;
 import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.AddPeerStateData;
 
@@ -37,7 +37,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.A
 @InterfaceAudience.Private
 public class AddPeerProcedure extends ModifyPeerProcedure {
 
-  private static final Log LOG = LogFactory.getLog(AddPeerProcedure.class);
+  private static final Logger LOG = 
LoggerFactory.getLogger(AddPeerProcedure.class);
 
   private ReplicationPeerConfig peerConfig;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/bb9dc5b9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/DisablePeerProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/DisablePeerProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/DisablePeerProcedure.java
index 0b32db9..9a28de6 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/DisablePeerProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/DisablePeerProcedure.java
@@ -19,11 +19,11 @@ package org.apache.hadoop.hbase.master.replication;
 
 import java.io.IOException;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
 import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
 import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The procedure for disabling a replication peer.
@@ -31,7 +31,7 @@ import org.apache.yetus.audience.InterfaceAudience;
 @InterfaceAudience.Private
 public class DisablePeerProcedure extends ModifyPeerProcedure {
 
-  private static final Log LOG = LogFactory.getLog(DisablePeerProcedure.class);
+  private static final Logger LOG = 
LoggerFactory.getLogger(DisablePeerProcedure.class);
 
   public DisablePeerProcedure() {
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/bb9dc5b9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/EnablePeerProcedure.java
--

[05/22] hbase git commit: HBASE-19545 Replace getBytes(StandardCharsets.UTF_8) with Bytes.toBytes

2017-12-27 Thread zhangduo
HBASE-19545 Replace getBytes(StandardCharsets.UTF_8) with Bytes.toBytes

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7145d981
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7145d981
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7145d981

Branch: refs/heads/HBASE-19397
Commit: 7145d98182fb95f6f1c1119c3e779eed0bc322bb
Parents: 41c2dd0
Author: Peter Somogyi 
Authored: Wed Dec 20 21:17:52 2017 +0100
Committer: Chia-Ping Tsai 
Committed: Wed Dec 27 20:07:25 2017 +0800

--
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  7 +-
 .../apache/hadoop/hbase/security/SaslUtil.java  |  3 +-
 .../hadoop/hbase/TestHColumnDescriptor.java |  8 +--
 .../hadoop/hbase/client/TestAsyncProcess.java   | 13 ++--
 .../hadoop/hbase/client/TestClientScanner.java  | 26 +++
 .../hadoop/hbase/client/TestDelayingRunner.java |  9 +--
 .../hadoop/hbase/client/TestOperation.java  | 76 +++-
 .../client/TestSimpleRequestController.java |  7 +-
 .../hbase/security/TestHBaseSaslRpcClient.java  | 27 +++
 .../org/apache/hadoop/hbase/util/Base64.java|  2 +-
 .../org/apache/hadoop/hbase/TestCellUtil.java   | 31 
 .../org/apache/hadoop/hbase/TestTableName.java  |  7 +-
 .../io/crypto/TestKeyStoreKeyProvider.java  |  4 +-
 .../apache/hadoop/hbase/types/TestStruct.java   | 14 ++--
 .../hbase/util/TestLoadTestKVGenerator.java |  9 ++-
 .../hadoop/hbase/util/TestOrderedBytes.java | 33 +
 .../hbase/client/example/HttpProxyExample.java  |  3 +-
 .../hbase/mapreduce/HFileOutputFormat2.java | 13 ++--
 18 files changed, 120 insertions(+), 172 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7145d981/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index af3916d..63310e6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -23,7 +23,6 @@ import com.google.protobuf.RpcController;
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.InterruptedIOException;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
@@ -1675,8 +1674,8 @@ public class HBaseAdmin implements Admin {
 byte[][] encodedNameofRegionsToMerge = new 
byte[nameofRegionsToMerge.length][];
 for(int i = 0; i < nameofRegionsToMerge.length; i++) {
   encodedNameofRegionsToMerge[i] = 
HRegionInfo.isEncodedRegionName(nameofRegionsToMerge[i]) ?
-nameofRegionsToMerge[i] : 
HRegionInfo.encodeRegionName(nameofRegionsToMerge[i])
-  .getBytes(StandardCharsets.UTF_8);
+  nameofRegionsToMerge[i] :
+  Bytes.toBytes(HRegionInfo.encodeRegionName(nameofRegionsToMerge[i]));
 }
 
 TableName tableName = null;
@@ -1774,7 +1773,7 @@ public class HBaseAdmin implements Admin {
   public Future splitRegionAsync(byte[] regionName, byte[] splitPoint)
   throws IOException {
 byte[] encodedNameofRegionToSplit = 
HRegionInfo.isEncodedRegionName(regionName) ?
-regionName : 
HRegionInfo.encodeRegionName(regionName).getBytes(StandardCharsets.UTF_8);
+regionName : Bytes.toBytes(HRegionInfo.encodeRegionName(regionName));
 Pair pair = getRegion(regionName);
 if (pair != null) {
   if (pair.getFirst() != null &&

http://git-wip-us.apache.org/repos/asf/hbase/blob/7145d981/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index d37abdf..7091df5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -28,6 +28,7 @@ import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -73,7 +74,7 @@ public class SaslUtil {
   }
 
   static byte[] decodeIdentifier(String identifier) {
-return Base64.decodeBase64(identifier.getBytes(StandardCharsets.UTF_8));
+return 

[14/22] hbase git commit: HBASE-19599 Remove ReplicationQueuesClient, use ReplicationQueueStorage directly

2017-12-27 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/0d6b613b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
index 93b8649..1faaae3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/DumpReplicationQueues.java
@@ -21,6 +21,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
@@ -48,17 +49,18 @@ import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
 import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
 import org.apache.hadoop.hbase.replication.ReplicationPeers;
 import org.apache.hadoop.hbase.replication.ReplicationQueueInfo;
+import org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
 import org.apache.hadoop.hbase.replication.ReplicationQueues;
-import org.apache.hadoop.hbase.replication.ReplicationQueuesClient;
-import org.apache.hadoop.hbase.replication.ReplicationQueuesClientArguments;
+import org.apache.hadoop.hbase.replication.ReplicationQueuesArguments;
+import org.apache.hadoop.hbase.replication.ReplicationStorageFactory;
 import org.apache.hadoop.hbase.replication.ReplicationTracker;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import 
org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.AtomicLongMap;
 
 /**
@@ -303,57 +305,53 @@ public class DumpReplicationQueues extends Configured 
implements Tool {
   }
 
   public String dumpQueues(ClusterConnection connection, ZKWatcher zkw, 
Set peerIds,
-   boolean hdfs) throws Exception {
-ReplicationQueuesClient queuesClient;
+  boolean hdfs) throws Exception {
+ReplicationQueueStorage queueStorage;
 ReplicationPeers replicationPeers;
 ReplicationQueues replicationQueues;
 ReplicationTracker replicationTracker;
-ReplicationQueuesClientArguments replicationArgs =
-new ReplicationQueuesClientArguments(getConf(), new 
WarnOnlyAbortable(), zkw);
+ReplicationQueuesArguments replicationArgs =
+new ReplicationQueuesArguments(getConf(), new WarnOnlyAbortable(), 
zkw);
 StringBuilder sb = new StringBuilder();
 
-queuesClient = 
ReplicationFactory.getReplicationQueuesClient(replicationArgs);
-queuesClient.init();
+queueStorage = ReplicationStorageFactory.getReplicationQueueStorage(zkw, 
getConf());
 replicationQueues = 
ReplicationFactory.getReplicationQueues(replicationArgs);
-replicationPeers = ReplicationFactory.getReplicationPeers(zkw, getConf(), 
queuesClient, connection);
+replicationPeers =
+ReplicationFactory.getReplicationPeers(zkw, getConf(), queueStorage, 
connection);
 replicationTracker = ReplicationFactory.getReplicationTracker(zkw, 
replicationPeers, getConf(),
   new WarnOnlyAbortable(), new WarnOnlyStoppable());
-List liveRegionServers = 
replicationTracker.getListOfRegionServers();
+Set liveRegionServers = new 
HashSet<>(replicationTracker.getListOfRegionServers());
 
 // Loops each peer on each RS and dumps the queues
-try {
-  List regionservers = queuesClient.getListOfReplicators();
-  if (regionservers == null || regionservers.isEmpty()) {
-return sb.toString();
+List regionservers = queueStorage.getListOfReplicators();
+if (regionservers == null || regionservers.isEmpty()) {
+  return sb.toString();
+}
+for (ServerName regionserver : regionservers) {
+  List queueIds = queueStorage.getAllQueues(regionserver);
+  replicationQueues.init(regionserver.getServerName());
+  if (!liveRegionServers.contains(regionserver.getServerName())) {
+deadRegionServers.add(regionserver.getServerName());
   }
-  for (String regionserver : regionservers) {
-List queueIds = queuesClient.getAllQueues(regionserver);
-replicationQueues.init(regionserver);
-if (!liveRegionServers.contains(regionserver)) {
-  deadRegionServers.add(regionserver);
-}
-for (String queueId : queueIds) {
-  ReplicationQueueInfo queueInfo = new ReplicationQueueInfo(queueId);
-  List wals = queuesClient.getLogsInQueue(regionserver, 
queueId);
-  if 

[15/22] hbase git commit: HBASE-19599 Remove ReplicationQueuesClient, use ReplicationQueueStorage directly

2017-12-27 Thread zhangduo
HBASE-19599 Remove ReplicationQueuesClient, use ReplicationQueueStorage directly


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0d6b613b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0d6b613b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0d6b613b

Branch: refs/heads/HBASE-19397
Commit: 0d6b613bb644945beada6a93be291bcd4a337ac9
Parents: f8d80fd
Author: zhangduo 
Authored: Mon Dec 25 18:49:56 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:17 2017 +0800

--
 .../hbase/replication/ReplicationFactory.java   |  19 +-
 .../replication/ReplicationPeersZKImpl.java |  24 +-
 .../replication/ReplicationQueueStorage.java|  26 +-
 .../replication/ReplicationQueuesClient.java|  93 -
 .../ReplicationQueuesClientArguments.java   |  40 --
 .../ReplicationQueuesClientZKImpl.java  | 176 -
 .../replication/ZKReplicationQueueStorage.java  |  90 -
 .../replication/TestReplicationStateBasic.java  | 378 +++
 .../replication/TestReplicationStateZKImpl.java | 148 
 .../TestZKReplicationQueueStorage.java  |  74 
 .../cleaner/ReplicationZKNodeCleaner.java   |  71 ++--
 .../cleaner/ReplicationZKNodeCleanerChore.java  |   5 +-
 .../replication/ReplicationPeerManager.java |  31 +-
 .../master/ReplicationHFileCleaner.java | 108 ++
 .../master/ReplicationLogCleaner.java   |  35 +-
 .../regionserver/DumpReplicationQueues.java |  77 ++--
 .../hbase/util/hbck/ReplicationChecker.java |  14 +-
 .../client/TestAsyncReplicationAdminApi.java|  31 +-
 .../replication/TestReplicationAdmin.java   |   2 +
 .../hbase/master/cleaner/TestLogsCleaner.java   |  29 +-
 .../cleaner/TestReplicationHFileCleaner.java|  58 +--
 .../cleaner/TestReplicationZKNodeCleaner.java   |  12 +-
 .../replication/TestReplicationStateBasic.java  | 378 ---
 .../replication/TestReplicationStateZKImpl.java | 227 ---
 .../TestReplicationSourceManagerZkImpl.java |  84 ++---
 25 files changed, 907 insertions(+), 1323 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0d6b613b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java
index 9f4ad18..6c1c213 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationFactory.java
@@ -1,5 +1,4 @@
-/*
- *
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -37,20 +36,14 @@ public class ReplicationFactory {
   args);
   }
 
-  public static ReplicationQueuesClient
-  getReplicationQueuesClient(ReplicationQueuesClientArguments args) throws 
Exception {
-return (ReplicationQueuesClient) ConstructorUtils
-.invokeConstructor(ReplicationQueuesClientZKImpl.class, args);
-  }
-
-  public static ReplicationPeers getReplicationPeers(final ZKWatcher zk, 
Configuration conf,
- Abortable abortable) {
+  public static ReplicationPeers getReplicationPeers(ZKWatcher zk, 
Configuration conf,
+  Abortable abortable) {
 return getReplicationPeers(zk, conf, null, abortable);
   }
 
-  public static ReplicationPeers getReplicationPeers(final ZKWatcher zk, 
Configuration conf,
- final 
ReplicationQueuesClient queuesClient, Abortable abortable) {
-return new ReplicationPeersZKImpl(zk, conf, queuesClient, abortable);
+  public static ReplicationPeers getReplicationPeers(ZKWatcher zk, 
Configuration conf,
+  ReplicationQueueStorage queueStorage, Abortable abortable) {
+return new ReplicationPeersZKImpl(zk, conf, queueStorage, abortable);
   }
 
   public static ReplicationTracker getReplicationTracker(ZKWatcher zookeeper,

http://git-wip-us.apache.org/repos/asf/hbase/blob/0d6b613b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ReplicationPeersZKImpl.java
index 8e2c5f4..f2e5647 100644
--- 

[19/22] hbase git commit: HBASE-19564 Procedure id is missing in the response of peer related operations

2017-12-27 Thread zhangduo
HBASE-19564 Procedure id is missing in the response of peer related operations


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/56c9b7f0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/56c9b7f0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/56c9b7f0

Branch: refs/heads/HBASE-19397
Commit: 56c9b7f04f7e1470e33bea0720d64ee93746816f
Parents: 80cb3e8
Author: zhangduo 
Authored: Wed Dec 20 20:57:37 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:17 2017 +0800

--
 .../hadoop/hbase/master/MasterRpcServices.java  | 24 ++--
 .../master/replication/ModifyPeerProcedure.java |  4 +---
 2 files changed, 13 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/56c9b7f0/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
index 700b363..9f71bab 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterRpcServices.java
@@ -1886,10 +1886,10 @@ public class MasterRpcServices extends RSRpcServices
   public AddReplicationPeerResponse addReplicationPeer(RpcController 
controller,
   AddReplicationPeerRequest request) throws ServiceException {
 try {
-  master.addReplicationPeer(request.getPeerId(),
-ReplicationPeerConfigUtil.convert(request.getPeerConfig()), 
request.getPeerState()
-.getState().equals(ReplicationState.State.ENABLED));
-  return AddReplicationPeerResponse.newBuilder().build();
+  long procId = master.addReplicationPeer(request.getPeerId(),
+ReplicationPeerConfigUtil.convert(request.getPeerConfig()),
+
request.getPeerState().getState().equals(ReplicationState.State.ENABLED));
+  return AddReplicationPeerResponse.newBuilder().setProcId(procId).build();
 } catch (ReplicationException | IOException e) {
   throw new ServiceException(e);
 }
@@ -1899,8 +1899,8 @@ public class MasterRpcServices extends RSRpcServices
   public RemoveReplicationPeerResponse removeReplicationPeer(RpcController 
controller,
   RemoveReplicationPeerRequest request) throws ServiceException {
 try {
-  master.removeReplicationPeer(request.getPeerId());
-  return RemoveReplicationPeerResponse.newBuilder().build();
+  long procId = master.removeReplicationPeer(request.getPeerId());
+  return 
RemoveReplicationPeerResponse.newBuilder().setProcId(procId).build();
 } catch (ReplicationException | IOException e) {
   throw new ServiceException(e);
 }
@@ -1910,8 +1910,8 @@ public class MasterRpcServices extends RSRpcServices
   public EnableReplicationPeerResponse enableReplicationPeer(RpcController 
controller,
   EnableReplicationPeerRequest request) throws ServiceException {
 try {
-  master.enableReplicationPeer(request.getPeerId());
-  return EnableReplicationPeerResponse.newBuilder().build();
+  long procId = master.enableReplicationPeer(request.getPeerId());
+  return 
EnableReplicationPeerResponse.newBuilder().setProcId(procId).build();
 } catch (ReplicationException | IOException e) {
   throw new ServiceException(e);
 }
@@ -1921,8 +1921,8 @@ public class MasterRpcServices extends RSRpcServices
   public DisableReplicationPeerResponse disableReplicationPeer(RpcController 
controller,
   DisableReplicationPeerRequest request) throws ServiceException {
 try {
-  master.disableReplicationPeer(request.getPeerId());
-  return DisableReplicationPeerResponse.newBuilder().build();
+  long procId = master.disableReplicationPeer(request.getPeerId());
+  return 
DisableReplicationPeerResponse.newBuilder().setProcId(procId).build();
 } catch (ReplicationException | IOException e) {
   throw new ServiceException(e);
 }
@@ -1948,9 +1948,9 @@ public class MasterRpcServices extends RSRpcServices
   public UpdateReplicationPeerConfigResponse 
updateReplicationPeerConfig(RpcController controller,
   UpdateReplicationPeerConfigRequest request) throws ServiceException {
 try {
-  master.updateReplicationPeerConfig(request.getPeerId(),
+  long procId = master.updateReplicationPeerConfig(request.getPeerId(),
 ReplicationPeerConfigUtil.convert(request.getPeerConfig()));
-  return UpdateReplicationPeerConfigResponse.newBuilder().build();
+  return 
UpdateReplicationPeerConfigResponse.newBuilder().setProcId(procId).build();
 } catch 

[08/22] hbase git commit: HBASE-19630 Add peer cluster key check when adding a new replication peer

2017-12-27 Thread zhangduo
HBASE-19630 Add peer cluster key check when adding a new replication peer

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bcfb848b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bcfb848b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bcfb848b

Branch: refs/heads/HBASE-19397
Commit: bcfb848b34f853fa5c7a7d1b4f43599da1fc0e96
Parents: 9a41109
Author: Guanghao Zhang 
Authored: Tue Dec 26 21:10:00 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:17 2017 +0800

--
 .../replication/ReplicationPeerManager.java | 54 
 .../replication/TestReplicationAdmin.java   | 22 
 2 files changed, 54 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bcfb848b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
index 84abfeb..b78cbce 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.master.replication;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
@@ -42,6 +43,7 @@ import 
org.apache.hadoop.hbase.replication.ReplicationPeerStorage;
 import org.apache.hadoop.hbase.replication.ReplicationQueueInfo;
 import org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
 import org.apache.hadoop.hbase.replication.ReplicationStorageFactory;
+import org.apache.hadoop.hbase.zookeeper.ZKConfig;
 import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
 import org.apache.yetus.audience.InterfaceAudience;
 
@@ -216,36 +218,36 @@ public final class ReplicationPeerManager {
 return desc != null ? Optional.of(desc.getPeerConfig()) : Optional.empty();
   }
 
-  /**
-   * If replicate_all flag is true, it means all user tables will be 
replicated to peer cluster.
-   * Then allow config exclude namespaces or exclude table-cfs which can't be 
replicated to peer
-   * cluster.
-   * 
-   * If replicate_all flag is false, it means all user tables can't be 
replicated to peer cluster.
-   * Then allow to config namespaces or table-cfs which will be replicated to 
peer cluster.
-   */
-  private static void checkPeerConfig(ReplicationPeerConfig peerConfig)
-  throws DoNotRetryIOException {
+  private void checkPeerConfig(ReplicationPeerConfig peerConfig) throws 
DoNotRetryIOException {
+checkClusterKey(peerConfig.getClusterKey());
+
 if (peerConfig.replicateAllUserTables()) {
-  if ((peerConfig.getNamespaces() != null && 
!peerConfig.getNamespaces().isEmpty()) ||
-(peerConfig.getTableCFsMap() != null && 
!peerConfig.getTableCFsMap().isEmpty())) {
-throw new DoNotRetryIOException("Need clean namespaces or table-cfs 
config firstly " +
-  "when you want replicate all cluster");
+  // If replicate_all flag is true, it means all user tables will be 
replicated to peer cluster.
+  // Then allow config exclude namespaces or exclude table-cfs which can't 
be replicated to peer
+  // cluster.
+  if ((peerConfig.getNamespaces() != null && 
!peerConfig.getNamespaces().isEmpty())
+  || (peerConfig.getTableCFsMap() != null && 
!peerConfig.getTableCFsMap().isEmpty())) {
+throw new DoNotRetryIOException("Need clean namespaces or table-cfs 
config firstly "
++ "when you want replicate all cluster");
   }
   
checkNamespacesAndTableCfsConfigConflict(peerConfig.getExcludeNamespaces(),
 peerConfig.getExcludeTableCFsMap());
 } else {
-  if ((peerConfig.getExcludeNamespaces() != null &&
-!peerConfig.getExcludeNamespaces().isEmpty()) ||
-(peerConfig.getExcludeTableCFsMap() != null &&
-  !peerConfig.getExcludeTableCFsMap().isEmpty())) {
+  // If replicate_all flag is false, it means all user tables can't be 
replicated to peer
+  // cluster. Then allow to config namespaces or table-cfs which will be 
replicated to peer
+  // cluster.
+  if ((peerConfig.getExcludeNamespaces() != null
+  && !peerConfig.getExcludeNamespaces().isEmpty())
+  || (peerConfig.getExcludeTableCFsMap() != null
+  && !peerConfig.getExcludeTableCFsMap().isEmpty())) {
 throw new DoNotRetryIOException(
-"Need clean 

[03/22] hbase git commit: HBASE-19637 Add .checkstyle to gitignore

2017-12-27 Thread zhangduo
HBASE-19637 Add .checkstyle to gitignore


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3317b873
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3317b873
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3317b873

Branch: refs/heads/HBASE-19397
Commit: 3317b8738d9028b935fc0cc0e645372d1027a5a7
Parents: 0e85a88
Author: zhangduo 
Authored: Wed Dec 27 11:19:02 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 11:24:35 2017 +0800

--
 .gitignore | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3317b873/.gitignore
--
diff --git a/.gitignore b/.gitignore
index 405edc0..0fce7d4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,3 +18,5 @@ dependency-reduced-pom.xml
 link_report/
 linklint-*.zip
 linklint/
+.checkstyle
+**/.checkstyle



[09/22] hbase git commit: HBASE-19216 Implement a general framework to execute remote procedure on RS

2017-12-27 Thread zhangduo
HBASE-19216 Implement a general framework to execute remote procedure on RS


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/48114790
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/48114790
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/48114790

Branch: refs/heads/HBASE-19397
Commit: 48114790cc5e65b522b39443a32d0f68b8874de6
Parents: 467a466
Author: zhangduo 
Authored: Fri Dec 15 21:06:44 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:17 2017 +0800

--
 .../hbase/procedure2/LockedResourceType.java|   4 +-
 .../procedure2/RemoteProcedureDispatcher.java   |  23 +-
 .../src/main/protobuf/Admin.proto   |   9 +-
 .../src/main/protobuf/MasterProcedure.proto |  30 +++
 .../src/main/protobuf/RegionServerStatus.proto  |  15 ++
 .../apache/hadoop/hbase/executor/EventType.java |  26 ++-
 .../hadoop/hbase/executor/ExecutorType.java |   3 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |  33 ++-
 .../hadoop/hbase/master/MasterRpcServices.java  |  13 ++
 .../assignment/RegionTransitionProcedure.java   |  18 +-
 .../procedure/MasterProcedureScheduler.java | 224 +--
 .../procedure/PeerProcedureInterface.java   |  34 +++
 .../master/procedure/RSProcedureDispatcher.java |  90 
 .../master/replication/ModifyPeerProcedure.java | 127 +++
 .../master/replication/RefreshPeerCallable.java |  67 ++
 .../replication/RefreshPeerProcedure.java   | 197 
 .../hbase/procedure2/RSProcedureCallable.java   |  43 
 .../hbase/regionserver/HRegionServer.java   |  69 +-
 .../hbase/regionserver/RSRpcServices.java   |  56 +++--
 .../handler/RSProcedureHandler.java |  51 +
 .../assignment/TestAssignmentManager.java   |  20 +-
 .../replication/DummyModifyPeerProcedure.java   |  41 
 .../TestDummyModifyPeerProcedure.java   |  80 +++
 .../security/access/TestAccessController.java   |   6 +-
 24 files changed, 1109 insertions(+), 170 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/48114790/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResourceType.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResourceType.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResourceType.java
index c5fe62b..dc9b5d4 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResourceType.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/LockedResourceType.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -22,5 +22,5 @@ import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Private
 public enum LockedResourceType {
-  SERVER, NAMESPACE, TABLE, REGION
+  SERVER, NAMESPACE, TABLE, REGION, PEER
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/48114790/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
index 54f2b08..e9a6906 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
@@ -226,13 +226,30 @@ public abstract class RemoteProcedureDispatcher {
+/**
+ * For building the remote operation.
+ */
 RemoteOperation remoteCallBuild(TEnv env, TRemote remote);
-void remoteCallCompleted(TEnv env, TRemote remote, RemoteOperation 
response);
+
+/**
+ * Called when the executeProcedure call is failed.
+ */
 void remoteCallFailed(TEnv env, TRemote remote, IOException exception);
+
+/**
+ * Called when RS tells the remote procedure is succeeded through the
+ * {@code reportProcedureDone} method.
+ */
+void remoteOperationCompleted(TEnv env);
+
+/**
+ * Called when RS tells the remote procedure is failed through the {@code 
reportProcedureDone}
+ * method.
+ * @param error the error message
+ */
+void remoteOperationFailed(TEnv 

[13/22] hbase git commit: HBASE-19536 Client side changes for moving peer modification from zk watcher to procedure

2017-12-27 Thread zhangduo
HBASE-19536 Client side changes for moving peer modification from zk watcher to 
procedure

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/80cb3e81
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/80cb3e81
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/80cb3e81

Branch: refs/heads/HBASE-19397
Commit: 80cb3e810f4d2f9dee16efd567397637553ac4a3
Parents: eb56ba1
Author: Guanghao Zhang 
Authored: Tue Dec 19 15:50:57 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:17 2017 +0800

--
 .../org/apache/hadoop/hbase/client/Admin.java   |  87 ++-
 .../apache/hadoop/hbase/client/HBaseAdmin.java  | 149 ++-
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java |  82 +-
 3 files changed, 238 insertions(+), 80 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/80cb3e81/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
index ff2722e..cf8e198 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Admin.java
@@ -2463,7 +2463,7 @@ public interface Admin extends Abortable, Closeable {
   /**
* Add a new replication peer for replicating data to slave cluster.
* @param peerId a short name that identifies the peer
-   * @param peerConfig configuration for the replication slave cluster
+   * @param peerConfig configuration for the replication peer
* @throws IOException if a remote or network exception occurs
*/
   default void addReplicationPeer(String peerId, ReplicationPeerConfig 
peerConfig)
@@ -2474,7 +2474,7 @@ public interface Admin extends Abortable, Closeable {
   /**
* Add a new replication peer for replicating data to slave cluster.
* @param peerId a short name that identifies the peer
-   * @param peerConfig configuration for the replication slave cluster
+   * @param peerConfig configuration for the replication peer
* @param enabled peer state, true if ENABLED and false if DISABLED
* @throws IOException if a remote or network exception occurs
*/
@@ -2482,6 +2482,37 @@ public interface Admin extends Abortable, Closeable {
   throws IOException;
 
   /**
+   * Add a new replication peer but does not block and wait for it.
+   * 
+   * You can use Future.get(long, TimeUnit) to wait on the operation to 
complete. It may throw
+   * ExecutionException if there was an error while executing the operation or 
TimeoutException in
+   * case the wait timeout was not long enough to allow the operation to 
complete.
+   * @param peerId a short name that identifies the peer
+   * @param peerConfig configuration for the replication peer
+   * @return the result of the async operation
+   * @throws IOException IOException if a remote or network exception occurs
+   */
+  default Future addReplicationPeerAsync(String peerId, 
ReplicationPeerConfig peerConfig)
+  throws IOException {
+return addReplicationPeerAsync(peerId, peerConfig, true);
+  }
+
+  /**
+   * Add a new replication peer but does not block and wait for it.
+   * 
+   * You can use Future.get(long, TimeUnit) to wait on the operation to 
complete. It may throw
+   * ExecutionException if there was an error while executing the operation or 
TimeoutException in
+   * case the wait timeout was not long enough to allow the operation to 
complete.
+   * @param peerId a short name that identifies the peer
+   * @param peerConfig configuration for the replication peer
+   * @param enabled peer state, true if ENABLED and false if DISABLED
+   * @return the result of the async operation
+   * @throws IOException IOException if a remote or network exception occurs
+   */
+  Future addReplicationPeerAsync(String peerId, ReplicationPeerConfig 
peerConfig,
+  boolean enabled) throws IOException;
+
+  /**
* Remove a peer and stop the replication.
* @param peerId a short name that identifies the peer
* @throws IOException if a remote or network exception occurs
@@ -2489,6 +2520,18 @@ public interface Admin extends Abortable, Closeable {
   void removeReplicationPeer(String peerId) throws IOException;
 
   /**
+   * Remove a replication peer but does not block and wait for it.
+   * 
+   * You can use Future.get(long, TimeUnit) to wait on the operation to 
complete. It may throw
+   * ExecutionException if there was an error while executing the operation or 
TimeoutException in
+   * case the wait timeout was not long enough to allow 

[17/22] hbase git commit: HBASE-19525 RS side changes for moving peer modification from zk watcher to procedure

2017-12-27 Thread zhangduo
HBASE-19525 RS side changes for moving peer modification from zk watcher to 
procedure


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ebdf261a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ebdf261a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ebdf261a

Branch: refs/heads/HBASE-19397
Commit: ebdf261afe43490ed7fdd3b3a8c4631d502f05ae
Parents: bb9dc5b
Author: huzheng 
Authored: Wed Dec 20 10:47:18 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:17 2017 +0800

--
 .../hadoop/hbase/protobuf/ProtobufUtil.java |  11 +-
 .../hbase/shaded/protobuf/ProtobufUtil.java |  13 +-
 .../hbase/replication/ReplicationListener.java  |  14 --
 .../hbase/replication/ReplicationPeer.java  |  28 ++-
 .../replication/ReplicationPeerZKImpl.java  | 180 ---
 .../replication/ReplicationPeersZKImpl.java |  19 +-
 .../replication/ReplicationTrackerZKImpl.java   |  73 +-
 .../regionserver/ReplicationSourceService.java  |   9 +-
 .../handler/RSProcedureHandler.java |   3 +
 .../replication/BaseReplicationEndpoint.java|   2 +-
 .../regionserver/PeerProcedureHandler.java  |  38 
 .../regionserver/PeerProcedureHandlerImpl.java  |  81 +++
 .../regionserver/RefreshPeerCallable.java   |  39 +++-
 .../replication/regionserver/Replication.java   |  10 +
 .../regionserver/ReplicationSource.java |   9 +-
 .../regionserver/ReplicationSourceManager.java  |  37 ++-
 .../replication/TestReplicationAdmin.java   |   2 +-
 .../TestReplicationAdminUsingProcedure.java | 226 +++
 .../replication/DummyModifyPeerProcedure.java   |  48 
 .../TestDummyModifyPeerProcedure.java   |  80 ---
 .../TestReplicationTrackerZKImpl.java   |  51 -
 .../TestReplicationSourceManager.java   |  32 ++-
 22 files changed, 533 insertions(+), 472 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ebdf261a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 267dc7a..d5285dc 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.protobuf;
 
+import static org.apache.hadoop.hbase.protobuf.ProtobufMagic.PB_MAGIC;
+
 import com.google.protobuf.ByteString;
 import com.google.protobuf.CodedInputStream;
 import com.google.protobuf.InvalidProtocolBufferException;
@@ -199,7 +201,7 @@ public final class ProtobufUtil {
* byte array that is bytes.length plus {@link 
ProtobufMagic#PB_MAGIC}.length.
*/
   public static byte [] prependPBMagic(final byte [] bytes) {
-return Bytes.add(ProtobufMagic.PB_MAGIC, bytes);
+return Bytes.add(PB_MAGIC, bytes);
   }
 
   /**
@@ -224,10 +226,11 @@ public final class ProtobufUtil {
* @param bytes bytes to check
* @throws DeserializationException if we are missing the pb magic prefix
*/
-  public static void expectPBMagicPrefix(final byte [] bytes) throws 
DeserializationException {
+  public static void expectPBMagicPrefix(final byte[] bytes) throws 
DeserializationException {
 if (!isPBMagicPrefix(bytes)) {
-  throw new DeserializationException("Missing pb magic " +
-  Bytes.toString(ProtobufMagic.PB_MAGIC) + " prefix");
+  String bytesPrefix = bytes == null ? "null" : 
Bytes.toStringBinary(bytes, 0, PB_MAGIC.length);
+  throw new DeserializationException(
+  "Missing pb magic " + Bytes.toString(PB_MAGIC) + " prefix, bytes: " 
+ bytesPrefix);
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/ebdf261a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index 17b1141..8954d04 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.shaded.protobuf;
 
+import static org.apache.hadoop.hbase.protobuf.ProtobufMagic.PB_MAGIC;
+
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -282,7 +284,7 @@ 

[21/22] hbase git commit: HBASE-19520 Add UTs for the new lock type PEER

2017-12-27 Thread zhangduo
HBASE-19520 Add UTs for the new lock type PEER

Signed-off-by: zhangduo 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/97ede8c3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/97ede8c3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/97ede8c3

Branch: refs/heads/HBASE-19397
Commit: 97ede8c36a19d70091150b44240255f0634ef4e3
Parents: 56c9b7f
Author: Guanghao Zhang 
Authored: Wed Dec 20 16:43:38 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:17 2017 +0800

--
 .../procedure/MasterProcedureScheduler.java |   9 +-
 .../procedure/TestMasterProcedureScheduler.java |  65 -
 ...TestMasterProcedureSchedulerConcurrency.java | 135 +++
 3 files changed, 201 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/97ede8c3/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
index dd85f5c..5f4665c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
@@ -389,6 +389,13 @@ public class MasterProcedureScheduler extends 
AbstractProcedureScheduler {
 while (tableIter.hasNext()) {
   count += tableIter.next().size();
 }
+
+// Peer queues
+final AvlTreeIterator peerIter = new AvlTreeIterator<>(peerMap);
+while (peerIter.hasNext()) {
+  count += peerIter.next().size();
+}
+
 return count;
   }
 
@@ -1041,7 +1048,7 @@ public class MasterProcedureScheduler extends 
AbstractProcedureScheduler {
* @see #wakePeerExclusiveLock(Procedure, String)
* @param procedure the procedure trying to acquire the lock
* @param peerId peer to lock
-   * @return true if the procedure has to wait for the per to be available
+   * @return true if the procedure has to wait for the peer to be available
*/
   public boolean waitPeerExclusiveLock(Procedure procedure, String peerId) {
 schedLock();

http://git-wip-us.apache.org/repos/asf/hbase/blob/97ede8c3/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
index 0291165..fd77e1f 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
@@ -905,6 +905,27 @@ public class TestMasterProcedureScheduler {
 }
   }
 
+  public static class TestPeerProcedure extends TestProcedure implements 
PeerProcedureInterface {
+private final String peerId;
+private final PeerOperationType opType;
+
+public TestPeerProcedure(long procId, String peerId, PeerOperationType 
opType) {
+  super(procId);
+  this.peerId = peerId;
+  this.opType = opType;
+}
+
+@Override
+public String getPeerId() {
+  return peerId;
+}
+
+@Override
+public PeerOperationType getPeerOperationType() {
+  return opType;
+}
+  }
+
   private static LockProcedure createLockProcedure(LockType lockType, long 
procId) throws Exception {
 LockProcedure procedure = new LockProcedure();
 
@@ -927,22 +948,19 @@ public class TestMasterProcedureScheduler {
 return createLockProcedure(LockType.SHARED, procId);
   }
 
-  private static void assertLockResource(LockedResource resource,
-  LockedResourceType resourceType, String resourceName)
-  {
+  private static void assertLockResource(LockedResource resource, 
LockedResourceType resourceType,
+  String resourceName) {
 assertEquals(resourceType, resource.getResourceType());
 assertEquals(resourceName, resource.getResourceName());
   }
 
-  private static void assertExclusiveLock(LockedResource resource, 
Procedure procedure)
-  {
+  private static void assertExclusiveLock(LockedResource resource, 
Procedure procedure) {
 assertEquals(LockType.EXCLUSIVE, resource.getLockType());
 assertEquals(procedure, resource.getExclusiveLockOwnerProcedure());
 assertEquals(0, resource.getSharedLockCount());
   }
 
-  private static void 

[07/22] hbase git commit: HBASE-19592 Add UTs to test retry on update zk failure

2017-12-27 Thread zhangduo
HBASE-19592 Add UTs to test retry on update zk failure


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/044385ec
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/044385ec
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/044385ec

Branch: refs/heads/HBASE-19397
Commit: 044385ecc1774bb867e5a6e70955701a093eda15
Parents: bcfb848
Author: zhangduo 
Authored: Tue Dec 26 20:39:00 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 27 21:50:17 2017 +0800

--
 .../replication/ReplicationPeerManager.java |   5 +-
 .../TestReplicationProcedureRetry.java  | 200 +++
 2 files changed, 202 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/044385ec/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
index b78cbce..f4ccce8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
@@ -53,7 +53,7 @@ import org.apache.yetus.audience.InterfaceAudience;
  * Used to add/remove a replication peer.
  */
 @InterfaceAudience.Private
-public final class ReplicationPeerManager {
+public class ReplicationPeerManager {
 
   private final ReplicationPeerStorage peerStorage;
 
@@ -61,8 +61,7 @@ public final class ReplicationPeerManager {
 
   private final ConcurrentMap peers;
 
-  private ReplicationPeerManager(ReplicationPeerStorage peerStorage,
-  ReplicationQueueStorage queueStorage,
+  ReplicationPeerManager(ReplicationPeerStorage peerStorage, 
ReplicationQueueStorage queueStorage,
   ConcurrentMap peers) {
 this.peerStorage = peerStorage;
 this.queueStorage = queueStorage;

http://git-wip-us.apache.org/repos/asf/hbase/blob/044385ec/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.java
new file mode 100644
index 000..ab35b46
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.java
@@ -0,0 +1,200 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.replication;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.anyBoolean;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.spy;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.master.HMaster;
+import org.apache.hadoop.hbase.master.replication.ReplicationPeerManager;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.ReplicationTests;
+import org.apache.zookeeper.KeeperException;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.mockito.invocation.InvocationOnMock;
+
+/**
+ * 

hbase git commit: HBASE-19647 Logging cleanups; emit region name when RegionTooBusyException inside RetriesExhausted... make netty connect/disconnect TRACE-level

2017-12-27 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 6d9081b86 -> 16cd91697


HBASE-19647 Logging cleanups; emit region name when RegionTooBusyException 
inside RetriesExhausted... make netty connect/disconnect TRACE-level


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/16cd9169
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/16cd9169
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/16cd9169

Branch: refs/heads/master
Commit: 16cd9169768351fcdb6876d499888bd1834cca97
Parents: 6d9081b
Author: Michael Stack 
Authored: Wed Dec 27 11:55:35 2017 -0800
Committer: Michael Stack 
Committed: Wed Dec 27 11:55:42 2017 -0800

--
 .../RetriesExhaustedWithDetailsException.java   |  7 ++-
 ...estRetriesExhaustedWithDetailsException.java | 65 
 .../hbase/ipc/NettyRpcServerRequestDecoder.java | 19 ++
 .../hbase/regionserver/CompactingMemStore.java  |  8 +--
 .../hbase/regionserver/CompactionPipeline.java  | 21 +++
 .../regionserver/FlushLargeStoresPolicy.java| 39 ++--
 .../hadoop/hbase/regionserver/HRegion.java  |  2 +-
 .../MemStoreCompactionStrategy.java |  7 +--
 .../hbase/regionserver/MemStoreCompactor.java   |  7 +--
 .../hadoop/hbase/regionserver/Segment.java  | 14 ++---
 10 files changed, 119 insertions(+), 70 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/16cd9169/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
index cb00675..e7eda2a 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
@@ -1,5 +1,4 @@
 /*
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -30,6 +29,7 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.RegionTooBusyException;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -154,7 +154,9 @@ extends RetriesExhaustedException {
 for (Throwable t : ths) {
   if (t == null) continue;
   String name = "";
-  if (t instanceof DoNotRetryIOException) {
+  if (t instanceof DoNotRetryIOException ||
+  t instanceof RegionTooBusyException) {
+// If RegionTooBusyException, print message since it has Region name 
in it.
 name = t.getMessage();
   } else {
 name = t.getClass().getSimpleName();
@@ -181,5 +183,4 @@ extends RetriesExhaustedException {
 }
 return classificatons.toString();
   }
-
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/16cd9169/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestRetriesExhaustedWithDetailsException.java
--
diff --git 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestRetriesExhaustedWithDetailsException.java
 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestRetriesExhaustedWithDetailsException.java
new file mode 100644
index 000..46f44fa
--- /dev/null
+++ 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestRetriesExhaustedWithDetailsException.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import org.apache.avro.generic.GenericData;
+import org.apache.hadoop.hbase.CategoryBasedTimeout;
+import org.apache.hadoop.hbase.RegionTooBusyException;
+import 

hbase git commit: HBASE-19647 Logging cleanups; emit regionname when RegionTooBusyException inside RetriesExhausted... make netty connect/disconnect TRACE-level

2017-12-27 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 b9f44bcfe -> cc8b37f84


HBASE-19647 Logging cleanups; emit regionname when RegionTooBusyException 
inside RetriesExhausted... make netty connect/disconnect TRACE-level


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cc8b37f8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cc8b37f8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cc8b37f8

Branch: refs/heads/branch-2
Commit: cc8b37f84731894c4846e29907ba81b4137122b2
Parents: b9f44bc
Author: Michael Stack 
Authored: Wed Dec 27 11:55:35 2017 -0800
Committer: Michael Stack 
Committed: Wed Dec 27 11:56:40 2017 -0800

--
 .../RetriesExhaustedWithDetailsException.java   |  7 ++-
 ...estRetriesExhaustedWithDetailsException.java | 65 
 .../hbase/ipc/NettyRpcServerRequestDecoder.java | 19 ++
 .../hbase/regionserver/CompactingMemStore.java  |  8 +--
 .../hbase/regionserver/CompactionPipeline.java  | 21 +++
 .../regionserver/FlushLargeStoresPolicy.java| 39 ++--
 .../hadoop/hbase/regionserver/HRegion.java  |  2 +-
 .../MemStoreCompactionStrategy.java |  7 +--
 .../hbase/regionserver/MemStoreCompactor.java   |  7 +--
 .../hadoop/hbase/regionserver/Segment.java  | 14 ++---
 10 files changed, 119 insertions(+), 70 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cc8b37f8/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
index cb00675..e7eda2a 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
@@ -1,5 +1,4 @@
 /*
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -30,6 +29,7 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.RegionTooBusyException;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -154,7 +154,9 @@ extends RetriesExhaustedException {
 for (Throwable t : ths) {
   if (t == null) continue;
   String name = "";
-  if (t instanceof DoNotRetryIOException) {
+  if (t instanceof DoNotRetryIOException ||
+  t instanceof RegionTooBusyException) {
+// If RegionTooBusyException, print message since it has Region name 
in it.
 name = t.getMessage();
   } else {
 name = t.getClass().getSimpleName();
@@ -181,5 +183,4 @@ extends RetriesExhaustedException {
 }
 return classificatons.toString();
   }
-
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/cc8b37f8/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestRetriesExhaustedWithDetailsException.java
--
diff --git 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestRetriesExhaustedWithDetailsException.java
 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestRetriesExhaustedWithDetailsException.java
new file mode 100644
index 000..46f44fa
--- /dev/null
+++ 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestRetriesExhaustedWithDetailsException.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import org.apache.avro.generic.GenericData;
+import org.apache.hadoop.hbase.CategoryBasedTimeout;
+import org.apache.hadoop.hbase.RegionTooBusyException;

hbase git commit: HBASE-19644 add the checkstyle rule to reject the illegal imports

2017-12-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 16cd91697 -> 01b1f48cc


HBASE-19644 add the checkstyle rule to reject the illegal imports


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/01b1f48c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/01b1f48c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/01b1f48c

Branch: refs/heads/master
Commit: 01b1f48ccd93e1b5173c3a6593b428ed5a014961
Parents: 16cd916
Author: Chia-Ping Tsai 
Authored: Thu Dec 28 04:10:42 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Dec 28 04:10:42 2017 +0800

--
 .../TestIncrementalBackupMergeWithFailures.java |  3 +-
 .../src/main/resources/hbase/checkstyle.xml |  4 ++-
 .../client/AsyncBufferedMutatorBuilderImpl.java |  5 ++--
 .../hbase/client/ConnectionImplementation.java  |  6 ++--
 .../apache/hadoop/hbase/client/MultiAction.java |  7 +
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java |  4 +--
 .../client/replication/ReplicationAdmin.java| 10 +++
 .../apache/hadoop/hbase/master/RegionState.java |  3 +-
 .../hadoop/hbase/util/ByteBufferArray.java  |  3 +-
 .../regionserver/MetricsTableLatenciesImpl.java |  3 +-
 .../apache/hadoop/hbase/http/HttpServer.java| 29 +---
 .../hbase/procedure2/ProcedureScheduler.java|  3 +-
 .../hadoop/hbase/rsgroup/TestRSGroupsBase.java  |  2 +-
 .../hbase/coprocessor/ObserverContextImpl.java  |  4 +--
 .../hbase/io/hfile/bucket/BucketCache.java  | 10 +++
 .../hbase/io/hfile/bucket/FileIOEngine.java | 15 +-
 .../master/balancer/StochasticLoadBalancer.java |  4 +--
 .../master/procedure/RecoverMetaProcedure.java  |  3 +-
 .../regionserver/VersionedSegmentsList.java |  5 ++--
 .../hbase/wal/NettyAsyncFSWALConfigHelper.java  | 10 +++
 .../hbase/client/TestAsyncClusterAdminApi.java  |  5 ++--
 .../hadoop/hbase/filter/TestFilterList.java |  4 +--
 .../hbase/io/hfile/bucket/TestBucketCache.java  |  9 +++---
 .../spark/IntegrationTestSparkBulkLoad.java |  9 ++
 24 files changed, 69 insertions(+), 91 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/01b1f48c/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupMergeWithFailures.java
--
diff --git 
a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupMergeWithFailures.java
 
b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupMergeWithFailures.java
index 50ca1d9..0aa7f98 100644
--- 
a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupMergeWithFailures.java
+++ 
b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupMergeWithFailures.java
@@ -25,7 +25,6 @@ import static org.junit.Assert.assertTrue;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -49,7 +48,7 @@ import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 
 @Category(LargeTests.class)
 public class TestIncrementalBackupMergeWithFailures extends TestBackupBase {

http://git-wip-us.apache.org/repos/asf/hbase/blob/01b1f48c/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
--
diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml 
b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
index 2240096..bdfea28 100644
--- a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
+++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
@@ -76,7 +76,9 @@
 
   
 
-
+
+  
+
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/01b1f48c/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilderImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilderImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilderImpl.java
index a44bafa..9880ba5 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilderImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilderImpl.java
@@ -17,12 +17,11 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import com.google.common.base.Preconditions;
-
 import 

hbase git commit: HBASE-19644 add the checkstyle rule to reject the illegal imports

2017-12-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 cc8b37f84 -> 7dee1bcd3


HBASE-19644 add the checkstyle rule to reject the illegal imports


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7dee1bcd
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7dee1bcd
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7dee1bcd

Branch: refs/heads/branch-2
Commit: 7dee1bcd315cbb67a5c70218cf19c0b2206b5f5f
Parents: cc8b37f
Author: Chia-Ping Tsai 
Authored: Thu Dec 28 04:04:20 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Dec 28 04:17:45 2017 +0800

--
 .../src/main/resources/hbase/checkstyle.xml |  4 ++-
 .../client/AsyncBufferedMutatorBuilderImpl.java |  5 ++--
 .../hbase/client/ConnectionImplementation.java  |  6 ++--
 .../apache/hadoop/hbase/client/MultiAction.java |  7 +
 .../hadoop/hbase/client/RawAsyncHBaseAdmin.java |  4 +--
 .../client/replication/ReplicationAdmin.java| 10 +++
 .../apache/hadoop/hbase/master/RegionState.java |  3 +-
 .../hadoop/hbase/util/ByteBufferArray.java  |  3 +-
 .../regionserver/MetricsTableLatenciesImpl.java |  3 +-
 .../apache/hadoop/hbase/http/HttpServer.java| 29 +---
 .../hbase/procedure2/ProcedureScheduler.java|  3 +-
 .../hadoop/hbase/rsgroup/TestRSGroupsBase.java  |  2 +-
 .../hbase/coprocessor/ObserverContextImpl.java  |  4 +--
 .../hbase/io/hfile/bucket/BucketCache.java  | 10 +++
 .../hbase/io/hfile/bucket/FileIOEngine.java | 15 +-
 .../master/balancer/StochasticLoadBalancer.java |  4 +--
 .../master/procedure/RecoverMetaProcedure.java  |  3 +-
 .../regionserver/VersionedSegmentsList.java |  5 ++--
 .../hbase/wal/NettyAsyncFSWALConfigHelper.java  | 10 +++
 .../hbase/client/TestAsyncClusterAdminApi.java  |  5 ++--
 .../hadoop/hbase/filter/TestFilterList.java |  4 +--
 .../hbase/io/hfile/bucket/TestBucketCache.java  |  9 +++---
 22 files changed, 66 insertions(+), 82 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7dee1bcd/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
--
diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml 
b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
index 2240096..bdfea28 100644
--- a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
+++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
@@ -76,7 +76,9 @@
 
   
 
-
+
+  
+
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/7dee1bcd/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilderImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilderImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilderImpl.java
index a44bafa..9880ba5 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilderImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncBufferedMutatorBuilderImpl.java
@@ -17,12 +17,11 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import com.google.common.base.Preconditions;
-
 import java.util.concurrent.TimeUnit;
-
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+
 /**
  * The implementation of {@link AsyncBufferedMutatorBuilder}.
  */

http://git-wip-us.apache.org/repos/asf/hbase/blob/7dee1bcd/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index 1f34dba..f6c207a 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -25,10 +25,7 @@ import static 
org.apache.hadoop.hbase.client.MetricsConnection.CLIENT_SIDE_METRI
 import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
 import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsentEx;
 
-import com.google.common.base.Throwables;
-
 import edu.umd.cs.findbugs.annotations.Nullable;
-
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.InterruptedIOException;
@@ -48,7 +45,6 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import 

[1/3] hbase git commit: HBASE-19628 ByteBufferCell should extend ExtendedCell

2017-12-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 467a4667d -> 6b39062e8


http://git-wip-us.apache.org/repos/asf/hbase/blob/6b39062e/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index aec9566..a0246a0 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -39,7 +39,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.util.MapReduceCell;
+import org.apache.hadoop.hbase.util.MapReduceExtendedCell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
@@ -674,7 +674,7 @@ public class TestImportExport {
   @Override
   public Void answer(InvocationOnMock invocation) throws Throwable {
 ImmutableBytesWritable writer = (ImmutableBytesWritable) 
invocation.getArgument(0);
-MapReduceCell key = (MapReduceCell) invocation.getArgument(1);
+MapReduceExtendedCell key = (MapReduceExtendedCell) 
invocation.getArgument(1);
 assertEquals("Key", Bytes.toString(writer.get()));
 assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
 return null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/6b39062e/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
index 23b3c04..1af9b1d 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
@@ -47,12 +47,12 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.mapreduce.WALPlayer.WALKeyValueMapper;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hadoop.hbase.util.MapReduceExtendedCell;
 import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MapReduceTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LauncherSecurityManager;
-import org.apache.hadoop.hbase.util.MapReduceCell;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.mapreduce.Mapper;
@@ -185,7 +185,7 @@ public class TestWALPlayer {
   @Override
   public Void answer(InvocationOnMock invocation) throws Throwable {
 ImmutableBytesWritable writer = (ImmutableBytesWritable) 
invocation.getArgument(0);
-MapReduceCell key = (MapReduceCell) invocation.getArgument(1);
+MapReduceExtendedCell key = (MapReduceExtendedCell) 
invocation.getArgument(1);
 assertEquals("row", Bytes.toString(writer.get()));
 assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
 return null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/6b39062e/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
index 50d5ddc..cfc3dd9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hbase.ByteBufferCell;
+import org.apache.hadoop.hbase.ByteBufferExtendedCell;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
@@ -383,14 +383,15 @@ public class HFileWriterImpl implements HFile.Writer {
   + CellUtil.getCellKeyAsString(left) + ", right=" + 
CellUtil.getCellKeyAsString(right));
 }
 byte[] midRow;
-boolean bufferBacked = left instanceof ByteBufferCell && right instanceof 
ByteBufferCell;
+boolean bufferBacked = left instanceof ByteBufferExtendedCell
+  

[2/3] hbase git commit: HBASE-19628 ByteBufferCell should extend ExtendedCell

2017-12-27 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/6b39062e/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
index 363f0bd..69eaab0 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
@@ -83,17 +83,17 @@ public final class PrivateCellUtil {
   /* misc */
 
   public static byte getRowByte(Cell cell, int index) {
-if (cell instanceof ByteBufferCell) {
-  return ((ByteBufferCell) cell).getRowByteBuffer()
-  .get(((ByteBufferCell) cell).getRowPosition() + index);
+if (cell instanceof ByteBufferExtendedCell) {
+  return ((ByteBufferExtendedCell) cell).getRowByteBuffer()
+  .get(((ByteBufferExtendedCell) cell).getRowPosition() + index);
 }
 return cell.getRowArray()[cell.getRowOffset() + index];
   }
 
   public static byte getQualifierByte(Cell cell, int index) {
-if (cell instanceof ByteBufferCell) {
-  return ((ByteBufferCell) cell).getQualifierByteBuffer()
-  .get(((ByteBufferCell) cell).getQualifierPosition() + index);
+if (cell instanceof ByteBufferExtendedCell) {
+  return ((ByteBufferExtendedCell) cell).getQualifierByteBuffer()
+  .get(((ByteBufferExtendedCell) cell).getQualifierPosition() + index);
 }
 return cell.getQualifierArray()[cell.getQualifierOffset() + index];
   }
@@ -115,15 +115,16 @@ public final class PrivateCellUtil {
* @return A new cell which is having the extra tags also added to it.
*/
   public static Cell createCell(Cell cell, byte[] tags) {
-if (cell instanceof ByteBufferCell) {
-  return new TagRewriteByteBufferCell((ByteBufferCell) cell, tags);
+if (cell instanceof ByteBufferExtendedCell) {
+  return new TagRewriteByteBufferExtendedCell((ByteBufferExtendedCell) 
cell, tags);
 }
 return new TagRewriteCell(cell, tags);
   }
 
   public static Cell createCell(Cell cell, byte[] value, byte[] tags) {
-if (cell instanceof ByteBufferCell) {
-  return new ValueAndTagRewriteByteBufferCell((ByteBufferCell) cell, 
value, tags);
+if (cell instanceof ByteBufferExtendedCell) {
+  return new 
ValueAndTagRewriteByteBufferExtendedCell((ByteBufferExtendedCell) cell,
+  value, tags);
 }
 return new ValueAndTagRewriteCell(cell, value, tags);
   }
@@ -314,25 +315,24 @@ public final class PrivateCellUtil {
 }
   }
 
-  static class TagRewriteByteBufferCell extends ByteBufferCell implements 
ExtendedCell {
+  static class TagRewriteByteBufferExtendedCell extends ByteBufferExtendedCell 
{
 
-protected ByteBufferCell cell;
+protected ByteBufferExtendedCell cell;
 protected byte[] tags;
 private static final int HEAP_SIZE_OVERHEAD = ClassSize.OBJECT + 2 * 
ClassSize.REFERENCE;
 
 /**
- * @param cell The original ByteBufferCell which it rewrites
+ * @param cell The original ByteBufferExtendedCell which it rewrites
  * @param tags the tags bytes. The array suppose to contain the tags bytes 
alone.
  */
-public TagRewriteByteBufferCell(ByteBufferCell cell, byte[] tags) {
-  assert cell instanceof ExtendedCell;
+public TagRewriteByteBufferExtendedCell(ByteBufferExtendedCell cell, 
byte[] tags) {
   assert tags != null;
   this.cell = cell;
   this.tags = tags;
   // tag offset will be treated as 0 and length this.tags.length
-  if (this.cell instanceof TagRewriteByteBufferCell) {
+  if (this.cell instanceof TagRewriteByteBufferExtendedCell) {
 // Cleaning the ref so that the byte[] can be GCed
-((TagRewriteByteBufferCell) this.cell).tags = null;
+((TagRewriteByteBufferExtendedCell) this.cell).tags = null;
   }
 }
 
@@ -490,8 +490,9 @@ public final class PrivateCellUtil {
 @Override
 public ExtendedCell deepClone() {
   Cell clonedBaseCell = ((ExtendedCell) this.cell).deepClone();
-  if (clonedBaseCell instanceof ByteBufferCell) {
-return new TagRewriteByteBufferCell((ByteBufferCell) clonedBaseCell, 
this.tags);
+  if (clonedBaseCell instanceof ByteBufferExtendedCell) {
+return new TagRewriteByteBufferExtendedCell((ByteBufferExtendedCell) 
clonedBaseCell,
+this.tags);
   }
   return new TagRewriteCell(clonedBaseCell, this.tags);
 }
@@ -585,7 +586,10 @@ public final class PrivateCellUtil {
   return write(out, withTags, this.cell, this.value, this.tags);
 }
 
-// Made into a static method so as to reuse the logic within 
ValueAndTagRewriteByteBufferCell
+/**
+ * Made into a static method so as to reuse the logic within
+ * ValueAndTagRewriteByteBufferExtendedCell
+

[2/3] hbase git commit: HBASE-19628 ByteBufferCell should extend ExtendedCell

2017-12-27 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/2468f300/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
index 363f0bd..f024a14 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/PrivateCellUtil.java
@@ -83,17 +83,17 @@ public final class PrivateCellUtil {
   /* misc */
 
   public static byte getRowByte(Cell cell, int index) {
-if (cell instanceof ByteBufferCell) {
-  return ((ByteBufferCell) cell).getRowByteBuffer()
-  .get(((ByteBufferCell) cell).getRowPosition() + index);
+if (cell instanceof ByteBufferExtendedCell) {
+  return ((ByteBufferExtendedCell) cell).getRowByteBuffer()
+  .get(((ByteBufferExtendedCell) cell).getRowPosition() + index);
 }
 return cell.getRowArray()[cell.getRowOffset() + index];
   }
 
   public static byte getQualifierByte(Cell cell, int index) {
-if (cell instanceof ByteBufferCell) {
-  return ((ByteBufferCell) cell).getQualifierByteBuffer()
-  .get(((ByteBufferCell) cell).getQualifierPosition() + index);
+if (cell instanceof ByteBufferExtendedCell) {
+  return ((ByteBufferExtendedCell) cell).getQualifierByteBuffer()
+  .get(((ByteBufferExtendedCell) cell).getQualifierPosition() + index);
 }
 return cell.getQualifierArray()[cell.getQualifierOffset() + index];
   }
@@ -115,15 +115,16 @@ public final class PrivateCellUtil {
* @return A new cell which is having the extra tags also added to it.
*/
   public static Cell createCell(Cell cell, byte[] tags) {
-if (cell instanceof ByteBufferCell) {
-  return new TagRewriteByteBufferCell((ByteBufferCell) cell, tags);
+if (cell instanceof ByteBufferExtendedCell) {
+  return new TagRewriteByteBufferExtendedCell((ByteBufferExtendedCell) 
cell, tags);
 }
 return new TagRewriteCell(cell, tags);
   }
 
   public static Cell createCell(Cell cell, byte[] value, byte[] tags) {
-if (cell instanceof ByteBufferCell) {
-  return new ValueAndTagRewriteByteBufferCell((ByteBufferCell) cell, 
value, tags);
+if (cell instanceof ByteBufferExtendedCell) {
+  return new 
ValueAndTagRewriteByteBufferExtendedCell((ByteBufferExtendedCell) cell,
+  value, tags);
 }
 return new ValueAndTagRewriteCell(cell, value, tags);
   }
@@ -314,25 +315,24 @@ public final class PrivateCellUtil {
 }
   }
 
-  static class TagRewriteByteBufferCell extends ByteBufferCell implements 
ExtendedCell {
+  static class TagRewriteByteBufferExtendedCell extends ByteBufferExtendedCell 
{
 
-protected ByteBufferCell cell;
+protected ByteBufferExtendedCell cell;
 protected byte[] tags;
 private static final int HEAP_SIZE_OVERHEAD = ClassSize.OBJECT + 2 * 
ClassSize.REFERENCE;
 
 /**
- * @param cell The original ByteBufferCell which it rewrites
+ * @param cell The original ByteBufferExtendedCell which it rewrites
  * @param tags the tags bytes. The array suppose to contain the tags bytes 
alone.
  */
-public TagRewriteByteBufferCell(ByteBufferCell cell, byte[] tags) {
-  assert cell instanceof ExtendedCell;
+public TagRewriteByteBufferExtendedCell(ByteBufferExtendedCell cell, 
byte[] tags) {
   assert tags != null;
   this.cell = cell;
   this.tags = tags;
   // tag offset will be treated as 0 and length this.tags.length
-  if (this.cell instanceof TagRewriteByteBufferCell) {
+  if (this.cell instanceof TagRewriteByteBufferExtendedCell) {
 // Cleaning the ref so that the byte[] can be GCed
-((TagRewriteByteBufferCell) this.cell).tags = null;
+((TagRewriteByteBufferExtendedCell) this.cell).tags = null;
   }
 }
 
@@ -490,8 +490,9 @@ public final class PrivateCellUtil {
 @Override
 public ExtendedCell deepClone() {
   Cell clonedBaseCell = ((ExtendedCell) this.cell).deepClone();
-  if (clonedBaseCell instanceof ByteBufferCell) {
-return new TagRewriteByteBufferCell((ByteBufferCell) clonedBaseCell, 
this.tags);
+  if (clonedBaseCell instanceof ByteBufferExtendedCell) {
+return new TagRewriteByteBufferExtendedCell((ByteBufferExtendedCell) 
clonedBaseCell,
+this.tags);
   }
   return new TagRewriteCell(clonedBaseCell, this.tags);
 }
@@ -585,7 +586,10 @@ public final class PrivateCellUtil {
   return write(out, withTags, this.cell, this.value, this.tags);
 }
 
-// Made into a static method so as to reuse the logic within 
ValueAndTagRewriteByteBufferCell
+/**
+ * Made into a static method so as to reuse the logic within
+ * ValueAndTagRewriteByteBufferExtendedCell
+

[3/3] hbase git commit: HBASE-19628 ByteBufferCell should extend ExtendedCell

2017-12-27 Thread chia7712
HBASE-19628 ByteBufferCell should extend ExtendedCell


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2468f300
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2468f300
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2468f300

Branch: refs/heads/branch-2
Commit: 2468f300299fc37553cfb292d780aa9b255343e7
Parents: 0454a38
Author: Chia-Ping Tsai 
Authored: Wed Dec 27 22:45:46 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Dec 27 23:02:18 2017 +0800

--
 .../hadoop/hbase/filter/ColumnPrefixFilter.java |   8 +-
 .../hadoop/hbase/filter/KeyOnlyFilter.java  |  57 ++-
 .../hadoop/hbase/filter/PrefixFilter.java   |   8 +-
 .../hbase/shaded/protobuf/ProtobufUtil.java |  20 +-
 .../hadoop/hbase/filter/TestKeyOnlyFilter.java  |   6 +-
 .../org/apache/hadoop/hbase/ByteBufferCell.java | 120 -
 .../hadoop/hbase/ByteBufferExtendedCell.java| 121 +
 .../hadoop/hbase/ByteBufferKeyOnlyKeyValue.java |  44 +-
 .../apache/hadoop/hbase/ByteBufferKeyValue.java |   6 +-
 .../apache/hadoop/hbase/CellComparatorImpl.java |  72 +--
 .../java/org/apache/hadoop/hbase/CellUtil.java  | 298 +--
 .../hadoop/hbase/ExtendedCellBuilder.java   |   2 +-
 .../apache/hadoop/hbase/PrivateCellUtil.java| 506 ++-
 .../java/org/apache/hadoop/hbase/TagUtil.java   |   6 +-
 .../io/encoding/BufferedDataBlockEncoder.java   |  14 +-
 .../hbase/io/encoding/RowIndexSeekerV1.java |   8 +-
 .../hadoop/hbase/TestByteBufferKeyValue.java|   4 +-
 .../hbase/io/TestTagCompressionContext.java |  10 +-
 .../hadoop/hbase/mapreduce/CellSortReducer.java |   4 +-
 .../hbase/mapreduce/HFileOutputFormat2.java |   8 +-
 .../apache/hadoop/hbase/mapreduce/Import.java   |  10 +-
 .../hadoop/hbase/mapreduce/WALPlayer.java   |   6 +-
 .../apache/hadoop/hbase/util/MapReduceCell.java | 271 --
 .../hbase/util/MapReduceExtendedCell.java   | 270 ++
 .../mapreduce/TestCellBasedImportExport2.java   |   6 +-
 .../mapreduce/TestCellBasedWALPlayer2.java  |   6 +-
 .../hadoop/hbase/io/hfile/HFileWriterImpl.java  |  29 +-
 .../hbase/regionserver/ByteBufferChunkCell.java |  48 --
 .../regionserver/ByteBufferChunkKeyValue.java   |  48 ++
 .../hadoop/hbase/regionserver/CellChunkMap.java |   2 +-
 .../hbase/regionserver/MemStoreLABImpl.java |   4 +-
 .../regionserver/NoTagByteBufferChunkCell.java  |  48 --
 .../NoTagByteBufferChunkKeyValue.java   |  48 ++
 .../hbase/regionserver/RSRpcServices.java   |   6 +-
 .../encoding/TestBufferedDataBlockEncoder.java  |   4 +-
 35 files changed, 1123 insertions(+), 1005 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2468f300/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
index 4811691..b4e7a0f 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.filter;
 import java.io.IOException;
 import java.util.ArrayList;
 
-import org.apache.hadoop.hbase.ByteBufferCell;
+import org.apache.hadoop.hbase.ByteBufferExtendedCell;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -95,9 +95,9 @@ public class ColumnPrefixFilter extends FilterBase {
   }
 
   private static int compareQualifierPart(Cell cell, int length, byte[] 
prefix) {
-if (cell instanceof ByteBufferCell) {
-  return ByteBufferUtils.compareTo(((ByteBufferCell) 
cell).getQualifierByteBuffer(),
-  ((ByteBufferCell) cell).getQualifierPosition(), length, prefix, 0, 
length);
+if (cell instanceof ByteBufferExtendedCell) {
+  return ByteBufferUtils.compareTo(((ByteBufferExtendedCell) 
cell).getQualifierByteBuffer(),
+  ((ByteBufferExtendedCell) cell).getQualifierPosition(), length, 
prefix, 0, length);
 }
 return Bytes.compareTo(cell.getQualifierArray(), 
cell.getQualifierOffset(), length, prefix, 0,
 length);

http://git-wip-us.apache.org/repos/asf/hbase/blob/2468f300/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
index 32286b6..4487292 100644
--- 

[1/3] hbase git commit: HBASE-19628 ByteBufferCell should extend ExtendedCell

2017-12-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 0454a389a -> 2468f3002


http://git-wip-us.apache.org/repos/asf/hbase/blob/2468f300/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
index b51df3f..87c79ec 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
@@ -70,7 +70,7 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LauncherSecurityManager;
-import org.apache.hadoop.hbase.util.MapReduceCell;
+import org.apache.hadoop.hbase.util.MapReduceExtendedCell;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.wal.WALKey;
@@ -676,12 +676,12 @@ public class TestCellBasedImportExport2 {
   @Override
   public Void answer(InvocationOnMock invocation) throws Throwable {
 ImmutableBytesWritable writer = (ImmutableBytesWritable) 
invocation.getArguments()[0];
-MapReduceCell key = (MapReduceCell) invocation.getArguments()[1];
+MapReduceExtendedCell key = (MapReduceExtendedCell) 
invocation.getArguments()[1];
 assertEquals("Key", Bytes.toString(writer.get()));
 assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
 return null;
   }
-}).when(ctx).write(any(ImmutableBytesWritable.class), 
any(MapReduceCell.class));
+}).when(ctx).write(any(ImmutableBytesWritable.class), 
any(MapReduceExtendedCell.class));
 
 importer.setup(ctx);
 Result value = mock(Result.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/2468f300/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedWALPlayer2.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedWALPlayer2.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedWALPlayer2.java
index 99bd5c1..03bd0b1 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedWALPlayer2.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedWALPlayer2.java
@@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MapReduceTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LauncherSecurityManager;
-import org.apache.hadoop.hbase.util.MapReduceCell;
+import org.apache.hadoop.hbase.util.MapReduceExtendedCell;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALKey;
 import org.apache.hadoop.mapreduce.Mapper;
@@ -185,12 +185,12 @@ public class TestCellBasedWALPlayer2 {
   @Override
   public Void answer(InvocationOnMock invocation) throws Throwable {
 ImmutableBytesWritable writer = (ImmutableBytesWritable) 
invocation.getArguments()[0];
-MapReduceCell key = (MapReduceCell) invocation.getArguments()[1];
+MapReduceExtendedCell key = (MapReduceExtendedCell) 
invocation.getArguments()[1];
 assertEquals("row", Bytes.toString(writer.get()));
 assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
 return null;
   }
-}).when(context).write(any(ImmutableBytesWritable.class), 
any(MapReduceCell.class));
+}).when(context).write(any(ImmutableBytesWritable.class), 
any(MapReduceExtendedCell.class));
 
 mapper.map(key, value, context);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/2468f300/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
index 50d5ddc..cfc3dd9 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileWriterImpl.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hbase.ByteBufferCell;
+import org.apache.hadoop.hbase.ByteBufferExtendedCell;
 import org.apache.hadoop.hbase.Cell;
 import 

[3/3] hbase git commit: HBASE-19628 ByteBufferCell should extend ExtendedCell

2017-12-27 Thread chia7712
HBASE-19628 ByteBufferCell should extend ExtendedCell


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6b39062e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6b39062e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6b39062e

Branch: refs/heads/master
Commit: 6b39062e8667e674fb9d5a7a3578fe7d21e7ef05
Parents: 467a466
Author: Chia-Ping Tsai 
Authored: Wed Dec 27 22:09:42 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Dec 27 22:26:40 2017 +0800

--
 .../mapreduce/MapReduceHFileSplitterJob.java|   7 +-
 .../hadoop/hbase/filter/ColumnPrefixFilter.java |   8 +-
 .../hadoop/hbase/filter/KeyOnlyFilter.java  |  57 ++-
 .../hadoop/hbase/filter/PrefixFilter.java   |   8 +-
 .../hbase/shaded/protobuf/ProtobufUtil.java |  20 +-
 .../hadoop/hbase/filter/TestKeyOnlyFilter.java  |   6 +-
 .../org/apache/hadoop/hbase/ByteBufferCell.java | 120 -
 .../hadoop/hbase/ByteBufferExtendedCell.java| 121 +
 .../hadoop/hbase/ByteBufferKeyOnlyKeyValue.java |  44 +-
 .../apache/hadoop/hbase/ByteBufferKeyValue.java |   6 +-
 .../apache/hadoop/hbase/CellComparatorImpl.java |  72 +--
 .../java/org/apache/hadoop/hbase/CellUtil.java  | 181 +++
 .../hadoop/hbase/ExtendedCellBuilder.java   |   2 +-
 .../apache/hadoop/hbase/PrivateCellUtil.java| 502 ++-
 .../java/org/apache/hadoop/hbase/TagUtil.java   |   6 +-
 .../io/encoding/BufferedDataBlockEncoder.java   |  14 +-
 .../hbase/io/encoding/RowIndexSeekerV1.java |   8 +-
 .../hadoop/hbase/TestByteBufferKeyValue.java|   4 +-
 .../hbase/io/TestTagCompressionContext.java |  10 +-
 .../hadoop/hbase/mapreduce/CellSortReducer.java |   4 +-
 .../hbase/mapreduce/HFileOutputFormat2.java |   8 +-
 .../apache/hadoop/hbase/mapreduce/Import.java   |  10 +-
 .../hadoop/hbase/mapreduce/WALPlayer.java   |   6 +-
 .../apache/hadoop/hbase/util/MapReduceCell.java | 271 --
 .../hbase/util/MapReduceExtendedCell.java   | 270 ++
 .../hbase/mapreduce/TestImportExport.java   |   4 +-
 .../hadoop/hbase/mapreduce/TestWALPlayer.java   |   4 +-
 .../hadoop/hbase/io/hfile/HFileWriterImpl.java  |  29 +-
 .../hbase/regionserver/ByteBufferChunkCell.java |  48 --
 .../regionserver/ByteBufferChunkKeyValue.java   |  48 ++
 .../hadoop/hbase/regionserver/CellChunkMap.java |   2 +-
 .../hbase/regionserver/MemStoreLABImpl.java |   4 +-
 .../regionserver/NoTagByteBufferChunkCell.java  |  48 --
 .../NoTagByteBufferChunkKeyValue.java   |  48 ++
 .../hbase/regionserver/RSRpcServices.java   |   6 +-
 .../encoding/TestBufferedDataBlockEncoder.java  |   4 +-
 36 files changed, 1059 insertions(+), 951 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6b39062e/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java
--
diff --git 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java
 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java
index 31428d0..cb5df5e 100644
--- 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java
+++ 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java
@@ -36,7 +36,7 @@ import org.apache.hadoop.hbase.mapreduce.HFileInputFormat;
 import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.MapReduceCell;
+import org.apache.hadoop.hbase.util.MapReduceExtendedCell;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
@@ -78,7 +78,8 @@ public class MapReduceHFileSplitterJob extends Configured 
implements Tool {
 @Override
 public void map(NullWritable key, Cell value, Context context)
 throws IOException, InterruptedException {
-  context.write(new ImmutableBytesWritable(CellUtil.cloneRow(value)), new 
MapReduceCell(value));
+  context.write(new ImmutableBytesWritable(CellUtil.cloneRow(value)),
+  new MapReduceExtendedCell(value));
 }
 
 @Override
@@ -113,7 +114,7 @@ public class MapReduceHFileSplitterJob extends Configured 
implements Tool {
   job.setReducerClass(CellSortReducer.class);
   Path outputDir = new Path(hfileOutPath);
   FileOutputFormat.setOutputPath(job, outputDir);
-  job.setMapOutputValueClass(MapReduceCell.class);
+  job.setMapOutputValueClass(MapReduceExtendedCell.class);
   try (Connection conn = 

[45/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html
index b9f6622..986acc0 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64InputStream.html
@@ -936,7 +936,7 @@
 928  bytes = 
s.getBytes(PREFERRED_ENCODING);
 929
 930} catch (UnsupportedEncodingException 
uee) {
-931  bytes = 
s.getBytes(StandardCharsets.UTF_8);
+931  bytes = Bytes.toBytes(s);
 932} // end catch
 933
 934// Decode

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html
index b9f6622..986acc0 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.Base64OutputStream.html
@@ -936,7 +936,7 @@
 928  bytes = 
s.getBytes(PREFERRED_ENCODING);
 929
 930} catch (UnsupportedEncodingException 
uee) {
-931  bytes = 
s.getBytes(StandardCharsets.UTF_8);
+931  bytes = Bytes.toBytes(s);
 932} // end catch
 933
 934// Decode

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.html 
b/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.html
index b9f6622..986acc0 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/util/Base64.html
@@ -936,7 +936,7 @@
 928  bytes = 
s.getBytes(PREFERRED_ENCODING);
 929
 930} catch (UnsupportedEncodingException 
uee) {
-931  bytes = 
s.getBytes(StandardCharsets.UTF_8);
+931  bytes = Bytes.toBytes(s);
 932} // end catch
 933
 934// Decode

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/book.html
--
diff --git a/book.html b/book.html
index 03d89ab..44b5e0b 100644
--- a/book.html
+++ b/book.html
@@ -37177,7 +37177,7 @@ The server will return cellblocks compressed using this 
same compressor as long
 
 
 Version 3.0.0-SNAPSHOT
-Last updated 2017-12-25 16:50:56 UTC
+Last updated 2017-12-27 14:29:47 UTC
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index cfa1251..8bc99d7 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
   Bulk Loads in Apache HBase (TM)
@@ -311,7 +311,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-26
+  Last Published: 
2017-12-27
 
 
 



[40/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/KeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/KeyValue.html
index 628bd79..8d9a06f 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":9,"i2":10,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":10,"i9":10,"i10":10,"i11":10,"i12":9,"i13":9,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":9,"i21":10,"i22":10,"i23":10,"i24":9,"i25":9,"i26":9,"i27":10,"i28":9,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":9,"i56":10,"i57":9,"i58":9,"i59":41,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":9,"i69":10,"i70":9};
+var methods = 
{"i0":10,"i1":9,"i2":10,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":10,"i9":10,"i10":10,"i11":10,"i12":9,"i13":9,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":9,"i21":10,"i22":10,"i23":10,"i24":9,"i25":9,"i26":9,"i27":10,"i28":9,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":9,"i54":10,"i55":9,"i56":9,"i57":41,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":9,"i67":10,"i68":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete 
Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class KeyValue
+public class KeyValue
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements ExtendedCell
 An HBase Key/Value. This is the fundamental HBase Type.
@@ -935,76 +935,64 @@ implements getSerializedSize(booleanwithTags)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalTag
-getTag(bytetype)
-Returns the specific tag of the given type
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTag
-getTags()
-Creates a list of tags in the current cell
-
-
-
 byte[]
 getTagsArray()
 Contiguous raw bytes representing tags that may start at 
any index in the containing array.
 
 
-
+
 int
 getTagsLength()
 This returns the total length of the tag bytes
 
 
-
+
 int
 getTagsOffset()
 This returns the offset where the tag actually starts.
 
 
-
+
 long
 getTimestamp()
 
-
+
 (package private) long
 getTimestamp(intkeylength)
 
-
+
 int
 getTimestampOffset()
 
-
+
 private int
 getTimestampOffset(intkeylength)
 
-
+
 byte
 getTypeByte()
 
-
+
 byte[]
 getValueArray()
 Contiguous raw bytes that may start at any index in the 
containing array.
 
 
-
+
 int
 getValueLength()
 
-
+
 int
 getValueOffset()
 
-
+
 int
 hashCode()
 In line with equals(Object),
 only uses the key portion, not the value.
 
 
-
+
 long
 heapSize()
 HeapSize implementation
@@ -1013,19 +1001,19 @@ implements 
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 humanReadableTimestamp(longtimestamp)
 
-
+
 boolean
 isLatestTimestamp()
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 keyToString(byte[]k)
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 keyToString(byte[]b,
into,
@@ -1033,7 +1021,7 @@ implements Use for logging.
 
 
-
+
 static long
 oswrite(KeyValuekv,
http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true;
 title="class or interface in java.io">OutputStreamout,
@@ -1044,66 +1032,66 @@ implements 
 
 
-
+
 void
 setSequenceId(longseqId)
 Sets with the given seqId.
 
 
-
+
 void
 setTimestamp(byte[]ts)
 Sets with the given timestamp.
 
 
-
+
 void
 setTimestamp(longts)
 Sets with the given timestamp.
 
 
-
+
 KeyValue
 shallowCopy()
 Creates a shallow copy of this KeyValue, reusing the data 
byte buffer.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 

[49/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/apidocs/src-html/org/apache/hadoop/hbase/ServerLoad.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/ServerLoad.html 
b/apidocs/src-html/org/apache/hadoop/hbase/ServerLoad.html
index df42ef7..cd5390f 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/ServerLoad.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/ServerLoad.html
@@ -31,554 +31,555 @@
 023import java.util.Arrays;
 024import java.util.List;
 025import java.util.Map;
-026import java.util.TreeMap;
-027import java.util.stream.Collectors;
-028import 
org.apache.hadoop.hbase.replication.ReplicationLoadSink;
-029import 
org.apache.hadoop.hbase.replication.ReplicationLoadSource;
-030import 
org.apache.hadoop.hbase.util.Bytes;
-031import 
org.apache.hadoop.hbase.util.Strings;
-032import 
org.apache.yetus.audience.InterfaceAudience;
-033
-034import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Objects;
-035import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
-036
-037/**
-038 * This class is used for exporting 
current state of load on a RegionServer.
-039 *
-040 * @deprecated As of release 2.0.0, this 
will be removed in HBase 3.0.0
-041 * Use {@link ServerMetrics} 
instead.
-042 */
-043@InterfaceAudience.Public
-044@Deprecated
-045public class ServerLoad implements 
ServerMetrics {
-046  private final ServerMetrics metrics;
-047  private int stores = 0;
-048  private int storefiles = 0;
-049  private int storeUncompressedSizeMB = 
0;
-050  private int storefileSizeMB = 0;
-051  private int memstoreSizeMB = 0;
-052  private long storefileIndexSizeKB = 
0;
-053  private long readRequestsCount = 0;
-054  private long filteredReadRequestsCount 
= 0;
-055  private long writeRequestsCount = 0;
-056  private int rootIndexSizeKB = 0;
-057  private int totalStaticIndexSizeKB = 
0;
-058  private int totalStaticBloomSizeKB = 
0;
-059  private long totalCompactingKVs = 0;
-060  private long currentCompactedKVs = 0;
-061
-062  /**
-063   * DONT USE this construction. It make 
a fake server name;
-064   */
-065  @InterfaceAudience.Private
-066  public 
ServerLoad(ClusterStatusProtos.ServerLoad serverLoad) {
-067
this(ServerName.valueOf("localhost,1,1"), serverLoad);
-068  }
-069
-070  
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD")
-071  @InterfaceAudience.Private
-072  public ServerLoad(ServerName name, 
ClusterStatusProtos.ServerLoad serverLoad) {
-073
this(ServerMetricsBuilder.toServerMetrics(name, serverLoad));
-074this.serverLoad = serverLoad;
-075  }
-076
-077  @InterfaceAudience.Private
-078  public ServerLoad(ServerMetrics 
metrics) {
-079this.metrics = metrics;
-080this.serverLoad = 
ServerMetricsBuilder.toServerLoad(metrics);
-081for (RegionMetrics rl : 
metrics.getRegionMetrics().values()) {
-082  stores += rl.getStoreCount();
-083  storefiles += 
rl.getStoreFileCount();
-084  storeUncompressedSizeMB += 
rl.getUncompressedStoreFileSize().get(Size.Unit.MEGABYTE);
-085  storefileSizeMB += 
rl.getStoreFileSize().get(Size.Unit.MEGABYTE);
-086  memstoreSizeMB += 
rl.getMemStoreSize().get(Size.Unit.MEGABYTE);
-087  readRequestsCount += 
rl.getReadRequestCount();
-088  filteredReadRequestsCount += 
rl.getFilteredReadRequestCount();
-089  writeRequestsCount += 
rl.getWriteRequestCount();
-090  storefileIndexSizeKB += 
rl.getStoreFileIndexSize().get(Size.Unit.KILOBYTE);
-091  rootIndexSizeKB += 
rl.getStoreFileRootLevelIndexSize().get(Size.Unit.KILOBYTE);
-092  totalStaticIndexSizeKB += 
rl.getStoreFileUncompressedDataIndexSize().get(Size.Unit.KILOBYTE);
-093  totalStaticBloomSizeKB += 
rl.getBloomFilterSize().get(Size.Unit.KILOBYTE);
-094  totalCompactingKVs += 
rl.getCompactingCellCount();
-095  currentCompactedKVs += 
rl.getCompactedCellCount();
-096}
-097  }
-098
-099  /**
-100   * NOTE: Function name cannot start 
with "get" because then an OpenDataException is thrown because
-101   * HBaseProtos.ServerLoad cannot be 
converted to an open data type(see HBASE-5967).
-102   * @return the underlying ServerLoad 
protobuf object
-103   * @deprecated DONT use this pb object 
since the byte array backed may be modified in rpc layer
-104   */
-105  @InterfaceAudience.Private
-106  @Deprecated
-107  public ClusterStatusProtos.ServerLoad 
obtainServerLoadPB() {
-108return serverLoad;
-109  }
-110
-111  protected 
ClusterStatusProtos.ServerLoad serverLoad;
-112
-113  /**
-114   * @return number of requests  since 
last report.
-115   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0.
-116   * Use {@link 
#getRequestCountPerSecond} instead.
-117   */
-118  @Deprecated
-119  public long getNumberOfRequests() {
-120return getRequestCountPerSecond();
-121  }
-122
-123  /**
-124   * @deprecated As of release 

[32/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/ServerMetricsBuilder.ServerMetricsImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/ServerMetricsBuilder.ServerMetricsImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/ServerMetricsBuilder.ServerMetricsImpl.html
index 4cf963f..964e621 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/ServerMetricsBuilder.ServerMetricsImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/ServerMetricsBuilder.ServerMetricsImpl.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class ServerMetricsBuilder.ServerMetricsImpl
+private static class ServerMetricsBuilder.ServerMetricsImpl
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements ServerMetrics
 
@@ -139,7 +139,7 @@ implements Field and Description
 
 
-private http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+private http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 coprocessorNames
 
 
@@ -201,7 +201,7 @@ implements Constructor and Description
 
 
-ServerMetricsImpl(ServerNameserverName,
+ServerMetricsImpl(ServerNameserverName,
  longrequestCountPerSecond,
  longrequestCount,
  SizeusedHeapSize,
@@ -210,7 +210,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationLoadSourcesources,
  ReplicationLoadSinksink,
  http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],RegionMetricsregionStatus,
- http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringcoprocessorNames,
+ http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringcoprocessorNames,
  longreportTimestamp,
  longlastReportTimestamp)
 
@@ -230,7 +230,7 @@ implements Method and Description
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 getCoprocessorNames()
 Return the RegionServer-level and Region-level 
coprocessors
 
@@ -315,7 +315,7 @@ implements 
 
 serverName
-private finalServerName serverName
+private finalServerName serverName
 
 
 
@@ -324,7 +324,7 @@ implements 
 
 requestCountPerSecond
-private finallong requestCountPerSecond
+private finallong requestCountPerSecond
 
 
 
@@ -333,7 +333,7 @@ implements 
 
 requestCount
-private finallong requestCount
+private finallong requestCount
 
 
 
@@ -342,7 +342,7 @@ implements 
 
 usedHeapSize
-private finalSize usedHeapSize
+private finalSize usedHeapSize
 
 
 
@@ -351,7 +351,7 @@ implements 
 
 maxHeapSize
-private finalSize maxHeapSize
+private finalSize maxHeapSize
 
 
 
@@ -360,7 +360,7 @@ implements 
 
 infoServerPort
-private finalint infoServerPort
+private finalint infoServerPort
 
 
 
@@ -369,7 +369,7 @@ implements 
 
 sources
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationLoadSource sources
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationLoadSource sources
 
 
 
@@ -379,7 +379,7 @@ implements 
 sink
 @Nullable
-private finalReplicationLoadSink sink
+private finalReplicationLoadSink sink
 
 
 
@@ -388,7 +388,7 @@ private final
 
 regionStatus
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],RegionMetrics regionStatus

[51/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
Published site at .


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/d2b28a1a
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/d2b28a1a
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/d2b28a1a

Branch: refs/heads/asf-site
Commit: d2b28a1a21ee66dca623a3bfbf178954fe4b766e
Parents: 4cddebd
Author: jenkins 
Authored: Wed Dec 27 15:19:17 2017 +
Committer: jenkins 
Committed: Wed Dec 27 15:19:17 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 apidocs/org/apache/hadoop/hbase/ServerLoad.html |   110 +-
 .../org/apache/hadoop/hbase/ServerMetrics.html  |30 +-
 .../apache/hadoop/hbase/client/Mutation.html|80 +-
 .../hbase/mapreduce/HFileOutputFormat2.html |18 +-
 .../org/apache/hadoop/hbase/ServerLoad.html |  1097 +-
 .../org/apache/hadoop/hbase/ServerMetrics.html  |   137 +-
 .../apache/hadoop/hbase/client/Mutation.html|  1870 +-
 .../hbase/mapreduce/HFileOutputFormat2.html |  1675 +-
 .../hbase/util/Base64.Base64InputStream.html| 2 +-
 .../hbase/util/Base64.Base64OutputStream.html   | 2 +-
 .../org/apache/hadoop/hbase/util/Base64.html| 2 +-
 book.html   | 2 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 29812 -
 checkstyle.rss  |18 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/constant-values.html | 6 +-
 devapidocs/index-all.html   |44 +-
 .../apache/hadoop/hbase/ByteBufferKeyValue.html |   210 +-
 .../hadoop/hbase/IndividualBytesFieldCell.html  |   178 +-
 .../hadoop/hbase/KeyValue.KVComparator.html |54 +-
 .../hadoop/hbase/KeyValue.KeyOnlyKeyValue.html  |74 +-
 .../hadoop/hbase/KeyValue.MetaComparator.html   |18 +-
 .../hbase/KeyValue.SamePrefixComparator.html| 4 +-
 .../org/apache/hadoop/hbase/KeyValue.Type.html  |28 +-
 .../org/apache/hadoop/hbase/KeyValue.html   |   350 +-
 .../hadoop/hbase/NoTagsByteBufferKeyValue.html  | 4 +-
 .../org/apache/hadoop/hbase/NoTagsKeyValue.html | 4 +-
 .../PrivateCellUtil.EmptyByteBufferCell.html|   140 +-
 .../hadoop/hbase/PrivateCellUtil.EmptyCell.html |   112 +-
 ...rivateCellUtil.FirstOnRowByteBufferCell.html |30 +-
 .../hbase/PrivateCellUtil.FirstOnRowCell.html   |30 +-
 ...ateCellUtil.FirstOnRowColByteBufferCell.html |36 +-
 .../PrivateCellUtil.FirstOnRowColCell.html  |36 +-
 ...eCellUtil.FirstOnRowColTSByteBufferCell.html |16 +-
 .../PrivateCellUtil.FirstOnRowColTSCell.html|16 +-
 ...vateCellUtil.FirstOnRowDeleteFamilyCell.html |30 +-
 ...PrivateCellUtil.LastOnRowByteBufferCell.html |30 +-
 .../hbase/PrivateCellUtil.LastOnRowCell.html|30 +-
 ...vateCellUtil.LastOnRowColByteBufferCell.html |36 +-
 .../hbase/PrivateCellUtil.LastOnRowColCell.html |36 +-
 ...rivateCellUtil.TagRewriteByteBufferCell.html |   170 +-
 .../hbase/PrivateCellUtil.TagRewriteCell.html   |   142 +-
 ...llUtil.ValueAndTagRewriteByteBufferCell.html |30 +-
 .../PrivateCellUtil.ValueAndTagRewriteCell.html |30 +-
 .../apache/hadoop/hbase/PrivateCellUtil.html|   190 +-
 devapidocs/org/apache/hadoop/hbase/RawCell.html |16 +-
 .../org/apache/hadoop/hbase/ServerLoad.html |   142 +-
 .../org/apache/hadoop/hbase/ServerMetrics.html  |30 +-
 .../ServerMetricsBuilder.ServerMetricsImpl.html |68 +-
 .../hadoop/hbase/ServerMetricsBuilder.html  |70 +-
 .../apache/hadoop/hbase/SizeCachedKeyValue.html | 4 +-
 .../hadoop/hbase/SizeCachedNoTagsKeyValue.html  | 4 +-
 .../hadoop/hbase/backup/package-tree.html   | 4 +-
 .../hadoop/hbase/class-use/RegionMetrics.html   | 4 +-
 .../hadoop/hbase/class-use/ServerName.html  | 4 +-
 .../org/apache/hadoop/hbase/class-use/Size.html | 4 +-
 .../org/apache/hadoop/hbase/class-use/Tag.html  |   128 +-
 .../client/HBaseAdmin.AbortProcedureFuture.html | 8 +-
 .../HBaseAdmin.AddColumnFamilyFuture.html   | 6 +-
 .../client/HBaseAdmin.CreateTableFuture.html|14 +-
 .../HBaseAdmin.DeleteColumnFamilyFuture.html| 6 +-
 .../client/HBaseAdmin.DeleteTableFuture.html|10 +-
 .../client/HBaseAdmin.DisableTableFuture.html   | 8 +-
 .../client/HBaseAdmin.EnableTableFuture.html| 8 +-
 

[09/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowCell.html
index 3400507..2baa140 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowCell.html
@@ -28,3034 +28,2926 @@
 020import static 
org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import 
com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import 
org.apache.hadoop.hbase.KeyValue.Type;
-037import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-040import 
org.apache.hadoop.hbase.io.util.Dictionary;
-041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-042import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import 
org.apache.hadoop.hbase.util.ByteRange;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045import 
org.apache.hadoop.hbase.util.ClassSize;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
-051 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this 
class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import 
org.apache.hadoop.hbase.KeyValue.Type;
+034import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import 
org.apache.hadoop.hbase.io.HeapSize;
+036import 
org.apache.hadoop.hbase.io.TagCompressionContext;
+037import 
org.apache.hadoop.hbase.io.util.Dictionary;
+038import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+039import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import 
org.apache.hadoop.hbase.util.ByteRange;
+041import 
org.apache.hadoop.hbase.util.Bytes;
+042import 
org.apache.hadoop.hbase.util.ClassSize;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
+049 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this 
class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange 
***/
 061
-062  /*** ByteRange 
***/
-063
-064  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-065return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-069return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-073return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074  cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-078return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-079  }
-080
-081  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-082return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
+063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
+067return 
range.set(cell.getFamilyArray(), 

[28/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/client/Mutation.CellWrapper.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/Mutation.CellWrapper.html 
b/devapidocs/org/apache/hadoop/hbase/client/Mutation.CellWrapper.html
index 975d227..d14d073 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Mutation.CellWrapper.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Mutation.CellWrapper.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static final class Mutation.CellWrapper
+private static final class Mutation.CellWrapper
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements ExtendedCell
 
@@ -277,7 +277,7 @@ implements 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTag
+http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorTag
 getTags()
 Creates a list of tags in the current cell
 
@@ -388,7 +388,7 @@ implements 
 
 FIXED_OVERHEAD
-private static finallong FIXED_OVERHEAD
+private static finallong FIXED_OVERHEAD
 
 
 
@@ -397,7 +397,7 @@ implements 
 
 cell
-private finalCell cell
+private finalCell cell
 
 
 
@@ -406,7 +406,7 @@ implements 
 
 sequenceId
-privatelong sequenceId
+privatelong sequenceId
 
 
 
@@ -415,7 +415,7 @@ implements 
 
 timestamp
-privatelong timestamp
+privatelong timestamp
 
 
 
@@ -432,7 +432,7 @@ implements 
 
 CellWrapper
-CellWrapper(Cellcell)
+CellWrapper(Cellcell)
 
 
 
@@ -449,7 +449,7 @@ implements 
 
 setSequenceId
-publicvoidsetSequenceId(longseqId)
+publicvoidsetSequenceId(longseqId)
 Description copied from 
interface:ExtendedCell
 Sets with the given seqId.
 
@@ -466,7 +466,7 @@ implements 
 
 setTimestamp
-publicvoidsetTimestamp(longts)
+publicvoidsetTimestamp(longts)
 Description copied from 
interface:ExtendedCell
 Sets with the given timestamp.
 
@@ -483,7 +483,7 @@ implements 
 
 setTimestamp
-publicvoidsetTimestamp(byte[]ts)
+publicvoidsetTimestamp(byte[]ts)
 Description copied from 
interface:ExtendedCell
 Sets with the given timestamp.
 
@@ -500,7 +500,7 @@ implements 
 
 getSequenceId
-publiclonggetSequenceId()
+publiclonggetSequenceId()
 Description copied from 
interface:ExtendedCell
 A region-specific unique monotonically increasing sequence 
ID given to each Cell. It always
  exists for cells in the memstore but is not retained forever. It will be kept 
for
@@ -522,7 +522,7 @@ implements 
 
 getValueArray
-publicbyte[]getValueArray()
+publicbyte[]getValueArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array. Max length is
  Integer.MAX_VALUE which is 2,147,483,647 bytes.
@@ -540,7 +540,7 @@ implements 
 
 getValueOffset
-publicintgetValueOffset()
+publicintgetValueOffset()
 
 Specified by:
 getValueOffsetin
 interfaceCell
@@ -555,7 +555,7 @@ implements 
 
 getValueLength
-publicintgetValueLength()
+publicintgetValueLength()
 
 Specified by:
 getValueLengthin
 interfaceCell
@@ -570,7 +570,7 @@ implements 
 
 getTagsArray
-publicbyte[]getTagsArray()
+publicbyte[]getTagsArray()
 Description copied from 
interface:ExtendedCell
 Contiguous raw bytes representing tags that may start at 
any index in the containing array.
 
@@ -589,7 +589,7 @@ implements 
 
 getTagsOffset
-publicintgetTagsOffset()
+publicintgetTagsOffset()
 
 Specified by:
 getTagsOffsetin
 interfaceCell
@@ -606,7 +606,7 @@ implements 
 
 getTagsLength
-publicintgetTagsLength()
+publicintgetTagsLength()
 Description copied from 
interface:ExtendedCell
 HBase internally uses 2 bytes to store tags length in Cell. 
As the tags length is always a
  non-negative number, to make good use of the sign bit, the max of tags length 
is defined 2 *
@@ -629,7 +629,7 @@ implements 
 
 getRowArray
-publicbyte[]getRowArray()
+publicbyte[]getRowArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.
@@ -647,7 +647,7 @@ implements 
 
 getRowOffset
-publicintgetRowOffset()
+publicintgetRowOffset()
 
 Specified by:
 getRowOffsetin
 interfaceCell
@@ -662,7 +662,7 @@ implements 
 
 getRowLength
-publicshortgetRowLength()
+publicshortgetRowLength()
 
 Specified by:
 getRowLengthin
 interfaceCell
@@ -677,7 +677,7 @@ implements 
 
 getFamilyArray
-publicbyte[]getFamilyArray()
+publicbyte[]getFamilyArray()
 Description copied from 
interface:Cell
 Contiguous bytes composed of legal HDFS filename characters 
which may start at any index in the
  containing array. Max length is Byte.MAX_VALUE, which is 127 bytes.
@@ -695,7 +695,7 @@ implements 
 
 getFamilyOffset
-publicintgetFamilyOffset()

[03/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowByteBufferCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowByteBufferCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowByteBufferCell.html
index 3400507..2baa140 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowByteBufferCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowByteBufferCell.html
@@ -28,3034 +28,2926 @@
 020import static 
org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import 
com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import 
org.apache.hadoop.hbase.KeyValue.Type;
-037import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-040import 
org.apache.hadoop.hbase.io.util.Dictionary;
-041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-042import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import 
org.apache.hadoop.hbase.util.ByteRange;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045import 
org.apache.hadoop.hbase.util.ClassSize;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
-051 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this 
class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import 
org.apache.hadoop.hbase.KeyValue.Type;
+034import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import 
org.apache.hadoop.hbase.io.HeapSize;
+036import 
org.apache.hadoop.hbase.io.TagCompressionContext;
+037import 
org.apache.hadoop.hbase.io.util.Dictionary;
+038import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+039import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import 
org.apache.hadoop.hbase.util.ByteRange;
+041import 
org.apache.hadoop.hbase.util.Bytes;
+042import 
org.apache.hadoop.hbase.util.ClassSize;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
+049 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this 
class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange 
***/
 061
-062  /*** ByteRange 
***/
-063
-064  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-065return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-069return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-073return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074  cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-078return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-079  }
-080
-081  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-082return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
+063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
+067return 

[35/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.ValueAndTagRewriteByteBufferCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.ValueAndTagRewriteByteBufferCell.html
 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.ValueAndTagRewriteByteBufferCell.html
index 06588fb..1dc3d76 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.ValueAndTagRewriteByteBufferCell.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.ValueAndTagRewriteByteBufferCell.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class PrivateCellUtil.ValueAndTagRewriteByteBufferCell
+static class PrivateCellUtil.ValueAndTagRewriteByteBufferCell
 extends PrivateCellUtil.TagRewriteByteBufferCell
 
 
@@ -277,7 +277,7 @@ extends PrivateCellUtil.TagRewriteByteBufferCell
-getFamilyArray,
 getFamilyByteBuffer,
 getFamilyLength,
 getFamilyOffset,
 getFamilyPosition,
 getQualifierArray,
 getQualifierByteBuffer,
 getQualifierLength,
 getQualifierOffset,
 getQualifierPosition,
 getRowArray,
 getRowByteBuffer,
 getRowLength,
 getRowOffset,
 getRowPositio
 n, getSequenceId,
 getTag,
 getTags,
 getTagsArray,
 getTagsByteBuffer,
 getTagsLength,
 getTagsOffset,
 getTags
 Position, getTimestamp,
 getTypeByte,
 setSequenceId,
 setTimestamp,
 setTimestamp
+getFamilyArray,
 getFamilyByteBuffer,
 getFamilyLength,
 getFamilyOffset,
 getFamilyPosition,
 getQualifierArray,
 getQualifierByteBuffer,
 getQualifierLength,
 getQualifierOffset,
 getQualifierPosition,
 getRowArray,
 getRowByteBuffer,
 getRowLength,
 getRowOffset,
 getRowPositio
 n, getSequenceId,
 getTagsArray,
 getTagsByteBuffer,
 getTagsLength,
 getTagsOffset,
 getTagsPosition,
 getTimestamp,
 getTypeByte, setSequenceId,
 setTimestamp,
 setTimestamp
 
 
 
@@ -298,7 +298,7 @@ extends RawCell
-checkForTagsLength,
 cloneTags
+checkForTagsLength,
 cloneTags,
 getTag,
 getTags
 
 
 
@@ -320,7 +320,7 @@ extends 
 
 value
-protectedbyte[] value
+protectedbyte[] value
 
 
 
@@ -337,7 +337,7 @@ extends 
 
 ValueAndTagRewriteByteBufferCell
-publicValueAndTagRewriteByteBufferCell(ByteBufferCellcell,
+publicValueAndTagRewriteByteBufferCell(ByteBufferCellcell,
 byte[]value,
 byte[]tags)
 
@@ -356,7 +356,7 @@ extends 
 
 getValueArray
-publicbyte[]getValueArray()
+publicbyte[]getValueArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array. Max length is
  Integer.MAX_VALUE which is 2,147,483,647 bytes.
@@ -376,7 +376,7 @@ extends 
 
 getValueOffset
-publicintgetValueOffset()
+publicintgetValueOffset()
 
 Specified by:
 getValueOffsetin
 interfaceCell
@@ -393,7 +393,7 @@ extends 
 
 getValueLength
-publicintgetValueLength()
+publicintgetValueLength()
 
 Specified by:
 getValueLengthin
 interfaceCell
@@ -410,7 +410,7 @@ extends 
 
 getValueByteBuffer
-publichttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffergetValueByteBuffer()
+publichttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffergetValueByteBuffer()
 
 Overrides:
 getValueByteBufferin
 classPrivateCellUtil.TagRewriteByteBufferCell
@@ -425,7 +425,7 @@ extends 
 
 getValuePosition
-publicintgetValuePosition()
+publicintgetValuePosition()
 
 Overrides:
 getValuePositionin
 classPrivateCellUtil.TagRewriteByteBufferCell
@@ -440,7 +440,7 @@ extends 
 
 heapSize
-publiclongheapSize()
+publiclongheapSize()
 
 Specified by:
 heapSizein
 interfaceHeapSize
@@ -458,7 +458,7 @@ extends 
 
 write
-publicintwrite(http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true;
 title="class or interface in java.io">OutputStreamout,
+publicintwrite(http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true;
 title="class or interface in java.io">OutputStreamout,
  booleanwithTags)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
interface:ExtendedCell
@@ -489,7 +489,7 @@ extends 
 
 getSerializedSize
-publicintgetSerializedSize(booleanwithTags)
+publicintgetSerializedSize(booleanwithTags)
 
 Specified by:
 getSerializedSizein
 interfaceExtendedCell
@@ -513,7 +513,7 @@ extends 
 
 write
-publicvoidwrite(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBufferbuf,
+publicvoidwrite(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBufferbuf,
   intoffset)
 Description copied from 
interface:ExtendedCell
 

[33/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/RawCell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/RawCell.html 
b/devapidocs/org/apache/hadoop/hbase/RawCell.html
index 565ff1d..a9b9734 100644
--- a/devapidocs/org/apache/hadoop/hbase/RawCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/RawCell.html
@@ -18,8 +18,8 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":17,"i1":18,"i2":6,"i3":6};
-var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],4:["t3","Abstract 
Methods"],16:["t5","Default Methods"]};
+var methods = {"i0":17,"i1":18,"i2":18,"i3":18};
+var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],16:["t5","Default Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
 var tableTab = "tableTab";
@@ -164,7 +164,7 @@ extends 
-All MethodsStatic MethodsInstance MethodsAbstract MethodsDefault Methods
+All MethodsStatic MethodsInstance MethodsDefault Methods
 
 Modifier and Type
 Method and Description
@@ -182,13 +182,13 @@ extends 
-http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalTag
+default http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalTag
 getTag(bytetype)
 Returns the specific tag of the given type
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTag
+default http://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorTag
 getTags()
 Creates a list of tags in the current cell
 
@@ -256,7 +256,7 @@ extends 
 
 getTags
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTaggetTags()
+defaulthttp://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorTaggetTags()
 Creates a list of tags in the current cell
 
 Returns:
@@ -270,7 +270,7 @@ extends 
 
 getTag
-http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalTaggetTag(bytetype)
+defaulthttp://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalTaggetTag(bytetype)
 Returns the specific tag of the given type
 
 Parameters:
@@ -286,7 +286,7 @@ extends 
 
 checkForTagsLength
-staticvoidcheckForTagsLength(inttagsLength)
+staticvoidcheckForTagsLength(inttagsLength)
 Check the length of tags. If it is invalid, throw 
IllegalArgumentException
 
 Parameters:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/ServerLoad.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ServerLoad.html 
b/devapidocs/org/apache/hadoop/hbase/ServerLoad.html
index 44841ae..a6b2f85 100644
--- a/devapidocs/org/apache/hadoop/hbase/ServerLoad.html
+++ b/devapidocs/org/apache/hadoop/hbase/ServerLoad.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
  http://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-public class ServerLoad
+public class ServerLoad
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements ServerMetrics
 This class is used for exporting current state of load on a 
RegionServer.
@@ -299,7 +299,7 @@ implements 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 getCoprocessorNames()
 Deprecated.
 Return the RegionServer-level and Region-level 
coprocessors
@@ -709,7 +709,7 @@ implements 
 
 metrics
-private finalServerMetrics metrics
+private finalServerMetrics metrics
 Deprecated.
 
 
@@ -719,7 +719,7 @@ implements 
 
 stores
-privateint stores
+privateint stores
 Deprecated.
 
 
@@ -729,7 +729,7 @@ implements 
 
 storefiles
-privateint storefiles
+privateint storefiles
 Deprecated.
 
 
@@ -739,7 +739,7 @@ implements 
 
 storeUncompressedSizeMB
-privateint storeUncompressedSizeMB
+privateint storeUncompressedSizeMB
 Deprecated.
 
 
@@ -749,7 

[37/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowDeleteFamilyCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowDeleteFamilyCell.html
 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowDeleteFamilyCell.html
index 0b00de1..8ff25d5 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowDeleteFamilyCell.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowDeleteFamilyCell.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class PrivateCellUtil.FirstOnRowDeleteFamilyCell
+private static class PrivateCellUtil.FirstOnRowDeleteFamilyCell
 extends PrivateCellUtil.EmptyCell
 
 
@@ -261,7 +261,7 @@ extends PrivateCellUtil.EmptyCell
-getFamilyOffset,
 getQualifierArray,
 getQualifierLength,
 getQualifierOffset,
 getRowOffset,
 getSequenceId,
 getTag,
 getTags,
 getTagsArray, getTagsLength,
 getTagsOffset,
 getValueArray,
 getValueLength,
 getValueOffset,
 setSequenceId,
 setTimestamp,
 setTimestamp
+getFamilyOffset,
 getQualifierArray,
 getQualifierLength,
 getQualifierOffset,
 getRowOffset,
 getSequenceId,
 getTagsArray,
 getTagsLength,
 getTagsOffset, getValueArray,
 getValueLength,
 getValueOffset,
 setSequenceId,
 setTimestamp,
 setTimestamp
 
 
 
@@ -282,7 +282,7 @@ extends RawCell
-checkForTagsLength,
 cloneTags
+checkForTagsLength,
 cloneTags,
 getTag,
 getTags
 
 
 
@@ -304,7 +304,7 @@ extends 
 
 FIXED_OVERHEAD
-private static finalint FIXED_OVERHEAD
+private static finalint FIXED_OVERHEAD
 
 
 
@@ -313,7 +313,7 @@ extends 
 
 row
-private finalbyte[] row
+private finalbyte[] row
 
 
 
@@ -322,7 +322,7 @@ extends 
 
 fam
-private finalbyte[] fam
+private finalbyte[] fam
 
 
 
@@ -339,7 +339,7 @@ extends 
 
 FirstOnRowDeleteFamilyCell
-publicFirstOnRowDeleteFamilyCell(byte[]row,
+publicFirstOnRowDeleteFamilyCell(byte[]row,
   byte[]fam)
 
 
@@ -357,7 +357,7 @@ extends 
 
 heapSize
-publiclongheapSize()
+publiclongheapSize()
 
 Returns:
 Approximate 'exclusive deep size' of implementing object.  Includes
@@ -371,7 +371,7 @@ extends 
 
 getRowArray
-publicbyte[]getRowArray()
+publicbyte[]getRowArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.
@@ -391,7 +391,7 @@ extends 
 
 getRowLength
-publicshortgetRowLength()
+publicshortgetRowLength()
 
 Specified by:
 getRowLengthin
 interfaceCell
@@ -408,7 +408,7 @@ extends 
 
 getFamilyArray
-publicbyte[]getFamilyArray()
+publicbyte[]getFamilyArray()
 Description copied from 
interface:Cell
 Contiguous bytes composed of legal HDFS filename characters 
which may start at any index in the
  containing array. Max length is Byte.MAX_VALUE, which is 127 bytes.
@@ -428,7 +428,7 @@ extends 
 
 getFamilyLength
-publicbytegetFamilyLength()
+publicbytegetFamilyLength()
 
 Specified by:
 getFamilyLengthin
 interfaceCell
@@ -445,7 +445,7 @@ extends 
 
 getTimestamp
-publiclonggetTimestamp()
+publiclonggetTimestamp()
 
 Returns:
 Long value representing time at which this cell was "Put" into the row.  
Typically
@@ -459,7 +459,7 @@ extends 
 
 getTypeByte
-publicbytegetTypeByte()
+publicbytegetTypeByte()
 
 Returns:
 The byte representation of the KeyValue.TYPE of this cell: one of Put, 
Delete, etc
@@ -472,7 +472,7 @@ extends 
 
 getType
-publicCell.DataTypegetType()
+publicCell.DataTypegetType()
 Description copied from 
interface:ExtendedCell
 Returns the type of cell in a human readable format using 
Cell.DataType
  

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowByteBufferCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowByteBufferCell.html
 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowByteBufferCell.html
index ec7b862..83b13ff 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowByteBufferCell.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowByteBufferCell.html
@@ -131,7 +131,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class PrivateCellUtil.LastOnRowByteBufferCell
+private static class PrivateCellUtil.LastOnRowByteBufferCell
 extends PrivateCellUtil.EmptyByteBufferCell
 
 
@@ -266,7 +266,7 @@ extends PrivateCellUtil.EmptyByteBufferCell
-getFamilyArray,
 getFamilyByteBuffer,
 getFamilyLength,
 getFamilyOffset,
 getFamilyPosition,
 getQualifierArray,
 getQualifierByteBuffer,
 getQualifierLength, getQualifierOffset,
 getQualifierPosition,
 getRowArray,
 getRowOffset,
 getSequenceId,
 getTag,
 getTags,
 getTagsArray, getTagsByteBuffer,
 getTagsLength,
 getTagsOffset,
 getTagsPosition,
 

[34/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.html 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.html
index 9c5b56f..d68657d 100644
--- a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.html
+++ b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public final class PrivateCellUtil
+public final class PrivateCellUtil
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 Utility methods helpful slinging Cell instances. It has more powerful 
and
  rich set of APIs than those in CellUtil for internal usage.
@@ -881,7 +881,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 PrivateCellUtil
-privatePrivateCellUtil()
+privatePrivateCellUtil()
 Private constructor to keep this class from being 
instantiated.
 
 
@@ -899,7 +899,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 fillRowRange
-public staticByteRangefillRowRange(Cellcell,
+public staticByteRangefillRowRange(Cellcell,
  ByteRangerange)
 ByteRange
 
@@ -910,7 +910,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 fillFamilyRange
-public staticByteRangefillFamilyRange(Cellcell,
+public staticByteRangefillFamilyRange(Cellcell,
 ByteRangerange)
 
 
@@ -920,7 +920,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 fillQualifierRange
-public staticByteRangefillQualifierRange(Cellcell,
+public staticByteRangefillQualifierRange(Cellcell,
ByteRangerange)
 
 
@@ -930,7 +930,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 fillValueRange
-public staticByteRangefillValueRange(Cellcell,
+public staticByteRangefillValueRange(Cellcell,
ByteRangerange)
 
 
@@ -940,7 +940,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 fillTagRange
-public staticByteRangefillTagRange(Cellcell,
+public staticByteRangefillTagRange(Cellcell,
  ByteRangerange)
 
 
@@ -950,7 +950,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getRowByte
-public staticbytegetRowByte(Cellcell,
+public staticbytegetRowByte(Cellcell,
   intindex)
 misc
 
@@ -961,7 +961,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getQualifierByte
-public staticbytegetQualifierByte(Cellcell,
+public staticbytegetQualifierByte(Cellcell,
 intindex)
 
 
@@ -971,7 +971,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getValueBufferShallowCopy
-public statichttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffergetValueBufferShallowCopy(Cellcell)
+public statichttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffergetValueBufferShallowCopy(Cellcell)
 
 
 
@@ -980,7 +980,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 createCell
-public staticCellcreateCell(Cellcell,
+public staticCellcreateCell(Cellcell,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTagtags)
 
 Returns:
@@ -994,7 +994,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 createCell
-public staticCellcreateCell(Cellcell,
+public staticCellcreateCell(Cellcell,
   byte[]tags)
 
 Returns:
@@ -1008,7 +1008,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 createCell
-public staticCellcreateCell(Cellcell,
+public staticCellcreateCell(Cellcell,
   byte[]value,
   byte[]tags)
 
@@ -1019,7 +1019,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 matchingRows
-public staticbooleanmatchingRows(Cellleft,
+public staticbooleanmatchingRows(Cellleft,
byte[]buf,
intoffset,
intlength)
@@ -1031,7 +1031,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 matchingFamily
-public staticbooleanmatchingFamily(Cellleft,
+public staticbooleanmatchingFamily(Cellleft,
  byte[]buf,

[42/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/IndividualBytesFieldCell.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/IndividualBytesFieldCell.html 
b/devapidocs/org/apache/hadoop/hbase/IndividualBytesFieldCell.html
index e87db7c..0567828 100644
--- a/devapidocs/org/apache/hadoop/hbase/IndividualBytesFieldCell.html
+++ b/devapidocs/org/apache/hadoop/hbase/IndividualBytesFieldCell.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class IndividualBytesFieldCell
+public class IndividualBytesFieldCell
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements ExtendedCell
 
@@ -369,84 +369,72 @@ implements 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalTag
-getTag(bytetype)
-Returns the specific tag of the given type
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTag
-getTags()
-Creates a list of tags in the current cell
-
-
-
 byte[]
 getTagsArray()
 Contiguous raw bytes representing tags that may start at 
any index in the containing array.
 
 
-
+
 int
 getTagsLength()
 HBase internally uses 2 bytes to store tags length in 
Cell.
 
 
-
+
 int
 getTagsOffset()
 
-
+
 long
 getTimestamp()
 
-
+
 byte
 getTypeByte()
 
-
+
 byte[]
 getValueArray()
 Contiguous raw bytes that may start at any index in the 
containing array.
 
 
-
+
 int
 getValueLength()
 
-
+
 int
 getValueOffset()
 
-
+
 private long
 heapOverhead()
 
-
+
 long
 heapSize()
 Implement HeapSize interface
 
 
-
+
 void
 setSequenceId(longseqId)
 Sets with the given seqId.
 
 
-
+
 void
 setTimestamp(byte[]ts)
 Sets with the given timestamp.
 
 
-
+
 void
 setTimestamp(longts)
 Sets with the given timestamp.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 
@@ -470,7 +458,7 @@ implements 
 
 Methods inherited from interfaceorg.apache.hadoop.hbase.RawCell
-checkForTagsLength,
 cloneTags
+checkForTagsLength,
 cloneTags,
 getTag,
 getTags
 
 
 
@@ -492,7 +480,7 @@ implements 
 
 FIXED_OVERHEAD
-private static finallong FIXED_OVERHEAD
+private static finallong FIXED_OVERHEAD
 
 
 
@@ -501,7 +489,7 @@ implements 
 
 row
-private finalbyte[] row
+private finalbyte[] row
 
 
 
@@ -510,7 +498,7 @@ implements 
 
 rOffset
-private finalint rOffset
+private finalint rOffset
 
 
 
@@ -519,7 +507,7 @@ implements 
 
 rLength
-private finalint rLength
+private finalint rLength
 
 
 
@@ -528,7 +516,7 @@ implements 
 
 family
-private finalbyte[] family
+private finalbyte[] family
 
 
 
@@ -537,7 +525,7 @@ implements 
 
 fOffset
-private finalint fOffset
+private finalint fOffset
 
 
 
@@ -546,7 +534,7 @@ implements 
 
 fLength
-private finalint fLength
+private finalint fLength
 
 
 
@@ -555,7 +543,7 @@ implements 
 
 qualifier
-private finalbyte[] qualifier
+private finalbyte[] qualifier
 
 
 
@@ -564,7 +552,7 @@ implements 
 
 qOffset
-private finalint qOffset
+private finalint qOffset
 
 
 
@@ -573,7 +561,7 @@ implements 
 
 qLength
-private finalint qLength
+private finalint qLength
 
 
 
@@ -582,7 +570,7 @@ implements 
 
 value
-private finalbyte[] value
+private finalbyte[] value
 
 
 
@@ -591,7 +579,7 @@ implements 
 
 vOffset
-private finalint vOffset
+private finalint vOffset
 
 
 
@@ -600,7 +588,7 @@ implements 
 
 vLength
-private finalint vLength
+private finalint vLength
 
 
 
@@ -609,7 +597,7 @@ implements 
 
 tags
-private finalbyte[] tags
+private finalbyte[] tags
 
 
 
@@ -618,7 +606,7 @@ implements 
 
 tagsOffset
-private finalint tagsOffset
+private finalint tagsOffset
 
 
 
@@ -627,7 +615,7 @@ implements 
 
 tagsLength
-private finalint tagsLength
+private finalint tagsLength
 
 
 
@@ -636,7 +624,7 @@ implements 
 
 timestamp
-privatelong timestamp
+privatelong timestamp
 
 
 
@@ -645,7 +633,7 @@ implements 
 
 type
-private finalbyte type
+private finalbyte type
 
 
 
@@ -654,7 +642,7 @@ implements 
 
 

[02/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowCell.html
index 3400507..2baa140 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.LastOnRowCell.html
@@ -28,3034 +28,2926 @@
 020import static 
org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import 
com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import 
org.apache.hadoop.hbase.KeyValue.Type;
-037import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-040import 
org.apache.hadoop.hbase.io.util.Dictionary;
-041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-042import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import 
org.apache.hadoop.hbase.util.ByteRange;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045import 
org.apache.hadoop.hbase.util.ClassSize;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
-051 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this 
class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import 
org.apache.hadoop.hbase.KeyValue.Type;
+034import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import 
org.apache.hadoop.hbase.io.HeapSize;
+036import 
org.apache.hadoop.hbase.io.TagCompressionContext;
+037import 
org.apache.hadoop.hbase.io.util.Dictionary;
+038import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+039import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import 
org.apache.hadoop.hbase.util.ByteRange;
+041import 
org.apache.hadoop.hbase.util.Bytes;
+042import 
org.apache.hadoop.hbase.util.ClassSize;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
+049 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this 
class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange 
***/
 061
-062  /*** ByteRange 
***/
-063
-064  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-065return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-069return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-073return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074  cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-078return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-079  }
-080
-081  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-082return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
+063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
+067return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 

[29/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
index b66bab7..4ee0640 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.html
@@ -115,7 +115,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Private
  @InterfaceStability.Evolving
-public class HBaseAdmin
+public class HBaseAdmin
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements Admin
 HBaseAdmin is no longer a client API. It is marked 
InterfaceAudience.Private indicating that
@@ -1839,7 +1839,7 @@ implements 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -1848,7 +1848,7 @@ implements 
 
 connection
-privateClusterConnection connection
+privateClusterConnection connection
 
 
 
@@ -1857,7 +1857,7 @@ implements 
 
 conf
-private finalorg.apache.hadoop.conf.Configuration conf
+private finalorg.apache.hadoop.conf.Configuration conf
 
 
 
@@ -1866,7 +1866,7 @@ implements 
 
 pause
-private finallong pause
+private finallong pause
 
 
 
@@ -1875,7 +1875,7 @@ implements 
 
 numRetries
-private finalint numRetries
+private finalint numRetries
 
 
 
@@ -1884,7 +1884,7 @@ implements 
 
 syncWaitTimeout
-private finalint syncWaitTimeout
+private finalint syncWaitTimeout
 
 
 
@@ -1893,7 +1893,7 @@ implements 
 
 aborted
-privateboolean aborted
+privateboolean aborted
 
 
 
@@ -1902,7 +1902,7 @@ implements 
 
 operationTimeout
-privateint operationTimeout
+privateint operationTimeout
 
 
 
@@ -1911,7 +1911,7 @@ implements 
 
 rpcTimeout
-privateint rpcTimeout
+privateint rpcTimeout
 
 
 
@@ -1920,7 +1920,7 @@ implements 
 
 rpcCallerFactory
-privateRpcRetryingCallerFactory 
rpcCallerFactory
+privateRpcRetryingCallerFactory 
rpcCallerFactory
 
 
 
@@ -1929,7 +1929,7 @@ implements 
 
 rpcControllerFactory
-privateRpcControllerFactory rpcControllerFactory
+privateRpcControllerFactory rpcControllerFactory
 
 
 
@@ -1938,7 +1938,7 @@ implements 
 
 ng
-privateNonceGenerator ng
+privateNonceGenerator ng
 
 
 
@@ -1955,7 +1955,7 @@ implements 
 
 HBaseAdmin
-HBaseAdmin(ClusterConnectionconnection)
+HBaseAdmin(ClusterConnectionconnection)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -1977,7 +1977,7 @@ implements 
 
 getOperationTimeout
-publicintgetOperationTimeout()
+publicintgetOperationTimeout()
 
 Specified by:
 getOperationTimeoutin
 interfaceAdmin
@@ -1990,7 +1990,7 @@ implements 
 
 abort
-publicvoidabort(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringwhy,
+publicvoidabort(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringwhy,
   http://docs.oracle.com/javase/8/docs/api/java/lang/Throwable.html?is-external=true;
 title="class or interface in java.lang">Throwablee)
 Description copied from 
interface:Abortable
 Abort the server or client.
@@ -2011,7 +2011,7 @@ implements 
 
 isAborted
-publicbooleanisAborted()
+publicbooleanisAborted()
 Description copied from 
interface:Abortable
 Check if the server or client was aborted.
 
@@ -2030,7 +2030,7 @@ implements 
 
 abortProcedure
-publicbooleanabortProcedure(longprocId,
+publicbooleanabortProcedure(longprocId,
   booleanmayInterruptIfRunning)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
interface:Admin
@@ -2054,7 +2054,7 @@ implements 
 
 abortProcedureAsync
-publichttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true;
 title="class or interface in java.util.concurrent">Futurehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">BooleanabortProcedureAsync(longprocId,
+publichttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/Future.html?is-external=true;
 title="class or interface in java.util.concurrent">Futurehttp://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true;
 title="class or interface in java.lang">BooleanabortProcedureAsync(longprocId,
booleanmayInterruptIfRunning)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description 

[47/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
index 8b6f080..1b7a148 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
@@ -26,954 +26,938 @@
 018
 019package org.apache.hadoop.hbase.client;
 020
-021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-022
-023import java.io.IOException;
-024import java.nio.ByteBuffer;
-025import java.util.ArrayList;
-026import java.util.Arrays;
-027import java.util.HashMap;
-028import java.util.Iterator;
-029import java.util.List;
-030import java.util.Map;
-031import java.util.NavigableMap;
-032import java.util.Optional;
-033import java.util.TreeMap;
-034import java.util.UUID;
-035import java.util.stream.Collectors;
-036import 
org.apache.hadoop.hbase.ArrayBackedTag;
-037import org.apache.hadoop.hbase.Cell;
-038import 
org.apache.hadoop.hbase.CellScannable;
-039import 
org.apache.hadoop.hbase.CellScanner;
-040import 
org.apache.hadoop.hbase.CellUtil;
-041import 
org.apache.hadoop.hbase.ExtendedCell;
-042import 
org.apache.hadoop.hbase.HConstants;
-043import 
org.apache.hadoop.hbase.KeyValue;
-044import 
org.apache.hadoop.hbase.PrivateCellUtil;
-045import org.apache.hadoop.hbase.RawCell;
-046import org.apache.hadoop.hbase.Tag;
-047import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-048import 
org.apache.hadoop.hbase.io.HeapSize;
-049import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-050import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-051import 
org.apache.hadoop.hbase.security.access.AccessControlConstants;
-052import 
org.apache.hadoop.hbase.security.access.AccessControlUtil;
-053import 
org.apache.hadoop.hbase.security.access.Permission;
-054import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
-055import 
org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
-056import 
org.apache.hadoop.hbase.util.Bytes;
-057import 
org.apache.hadoop.hbase.util.ClassSize;
-058import 
org.apache.yetus.audience.InterfaceAudience;
-059
-060import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-061import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-062import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-063import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-064import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
-065import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
-066import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
-067
-068@InterfaceAudience.Public
-069public abstract class Mutation extends 
OperationWithAttributes implements Row, CellScannable,
-070HeapSize {
-071  public static final long 
MUTATION_OVERHEAD = ClassSize.align(
-072  // This
-073  ClassSize.OBJECT +
-074  // row + 
OperationWithAttributes.attributes
-075  2 * ClassSize.REFERENCE +
-076  // Timestamp
-077  1 * Bytes.SIZEOF_LONG +
-078  // durability
-079  ClassSize.REFERENCE +
-080  // familyMap
-081  ClassSize.REFERENCE +
-082  // familyMap
-083  ClassSize.TREEMAP +
-084  // priority
-085  ClassSize.INTEGER
-086  );
-087
-088  /**
-089   * The attribute for storing the list 
of clusters that have consumed the change.
-090   */
-091  private static final String 
CONSUMED_CLUSTER_IDS = "_cs.id";
-092
-093  /**
-094   * The attribute for storing TTL for 
the result of the mutation.
-095   */
-096  private static final String 
OP_ATTRIBUTE_TTL = "_ttl";
-097
-098  private static final String 
RETURN_RESULTS = "_rr_";
-099
-100  // TODO: row should be final
-101  protected byte [] row = null;
-102  protected long ts = 
HConstants.LATEST_TIMESTAMP;
-103  protected Durability durability = 
Durability.USE_DEFAULT;
+021import java.io.IOException;
+022import java.nio.ByteBuffer;
+023import java.util.ArrayList;
+024import java.util.Arrays;
+025import java.util.HashMap;
+026import java.util.Iterator;
+027import java.util.List;
+028import java.util.Map;
+029import java.util.NavigableMap;
+030import java.util.Optional;
+031import java.util.TreeMap;
+032import java.util.UUID;
+033import java.util.stream.Collectors;
+034import org.apache.hadoop.hbase.Cell;
+035import 
org.apache.hadoop.hbase.CellScannable;
+036import 
org.apache.hadoop.hbase.CellScanner;
+037import 
org.apache.hadoop.hbase.CellUtil;
+038import 
org.apache.hadoop.hbase.ExtendedCell;
+039import 
org.apache.hadoop.hbase.HConstants;
+040import 
org.apache.hadoop.hbase.KeyValue;
+041import 
org.apache.hadoop.hbase.PrivateCellUtil;
+042import org.apache.hadoop.hbase.RawCell;
+043import 

[25/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
index 89514fe..01f3365 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OnheapDecodedCell.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-protected static class BufferedDataBlockEncoder.OnheapDecodedCell
+protected static class BufferedDataBlockEncoder.OnheapDecodedCell
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements ExtendedCell
 Copies only the key part of the keybuffer by doing a deep 
copy and passes the
@@ -343,89 +343,77 @@ implements getSerializedSize(booleanwithTags)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalTag
-getTag(bytetype)
-Returns the specific tag of the given type
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTag
-getTags()
-Creates a list of tags in the current cell
-
-
-
 byte[]
 getTagsArray()
 Contiguous raw bytes representing tags that may start at 
any index in the containing array.
 
 
-
+
 int
 getTagsLength()
 HBase internally uses 2 bytes to store tags length in 
Cell.
 
 
-
+
 int
 getTagsOffset()
 
-
+
 long
 getTimestamp()
 
-
+
 byte
 getTypeByte()
 
-
+
 byte[]
 getValueArray()
 Contiguous raw bytes that may start at any index in the 
containing array.
 
 
-
+
 int
 getValueLength()
 
-
+
 int
 getValueOffset()
 
-
+
 long
 heapSize()
 
-
+
 void
 setSequenceId(longseqId)
 Sets with the given seqId.
 
 
-
+
 void
 setTimestamp(byte[]ts)
 Sets with the given timestamp.
 
 
-
+
 void
 setTimestamp(longts)
 Sets with the given timestamp.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 
-
+
 void
 write(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBufferbuf,
  intoffset)
 Write this Cell into the given buf's offset in a KeyValue format.
 
 
-
+
 int
 write(http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true;
 title="class or interface in java.io">OutputStreamout,
  booleanwithTags)
@@ -452,7 +440,7 @@ implements 
 
 Methods inherited from interfaceorg.apache.hadoop.hbase.RawCell
-checkForTagsLength,
 cloneTags
+checkForTagsLength,
 cloneTags,
 getTag,
 getTags
 
 
 
@@ -474,7 +462,7 @@ implements 
 
 FIXED_OVERHEAD
-private static finallong FIXED_OVERHEAD
+private static finallong FIXED_OVERHEAD
 
 
 
@@ -483,7 +471,7 @@ implements 
 
 keyOnlyBuffer
-privatebyte[] keyOnlyBuffer
+privatebyte[] keyOnlyBuffer
 
 
 
@@ -492,7 +480,7 @@ implements 
 
 rowLength
-privateshort rowLength
+privateshort rowLength
 
 
 
@@ -501,7 +489,7 @@ implements 
 
 familyOffset
-privateint familyOffset
+privateint familyOffset
 
 
 
@@ -510,7 +498,7 @@ implements 
 
 familyLength
-privatebyte familyLength
+privatebyte familyLength
 
 
 
@@ -519,7 +507,7 @@ implements 
 
 qualifierOffset
-privateint qualifierOffset
+privateint qualifierOffset
 
 
 
@@ -528,7 +516,7 @@ implements 
 
 qualifierLength
-privateint qualifierLength
+privateint qualifierLength
 
 
 
@@ -537,7 +525,7 @@ implements 
 
 timestamp
-privatelong timestamp
+privatelong timestamp
 
 
 
@@ -546,7 +534,7 @@ implements 
 
 typeByte
-privatebyte typeByte
+privatebyte typeByte
 
 
 
@@ -555,7 +543,7 @@ implements 
 
 valueBuffer
-privatebyte[] valueBuffer
+privatebyte[] valueBuffer
 
 
 
@@ -564,7 +552,7 @@ implements 
 
 valueOffset
-privateint valueOffset
+privateint valueOffset
 
 
 
@@ -573,7 +561,7 @@ implements 
 
 valueLength

[22/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.Rejection.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.Rejection.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.Rejection.html
index 386a496..364a741 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.Rejection.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.Rejection.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class CompactSplit.Rejection
+private static class CompactSplit.Rejection
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/RejectedExecutionHandler.html?is-external=true;
 title="class or interface in 
java.util.concurrent">RejectedExecutionHandler
 Cleanup class to use when rejecting a compaction request 
from the queue.
@@ -191,7 +191,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 Rejection
-privateRejection()
+privateRejection()
 
 
 
@@ -208,7 +208,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/concurren
 
 
 rejectedExecution
-publicvoidrejectedExecution(http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnablerunnable,
+publicvoidrejectedExecution(http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnablerunnable,
   http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ThreadPoolExecutor.html?is-external=true;
 title="class or interface in 
java.util.concurrent">ThreadPoolExecutorpool)
 
 Specified by:

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.html
index 2157269..f1d7e1a 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/CompactSplit.html
@@ -670,7 +670,7 @@ implements 
 
 COMPARATOR
-private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">Comparatorhttp://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable COMPARATOR
+private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Comparator.html?is-external=true;
 title="class or interface in java.util">Comparatorhttp://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable COMPARATOR
 
 
 
@@ -926,7 +926,7 @@ implements 
 
 join
-voidjoin()
+voidjoin()
 
 
 
@@ -935,7 +935,7 @@ implements 
 
 getCompactionQueueSize
-publicintgetCompactionQueueSize()
+publicintgetCompactionQueueSize()
 Returns the current size of the queue containing regions 
that are
  processed.
 
@@ -950,7 +950,7 @@ implements 
 
 getLargeCompactionQueueSize
-publicintgetLargeCompactionQueueSize()
+publicintgetLargeCompactionQueueSize()
 
 
 
@@ -959,7 +959,7 @@ implements 
 
 getSmallCompactionQueueSize
-publicintgetSmallCompactionQueueSize()
+publicintgetSmallCompactionQueueSize()
 
 
 
@@ -968,7 +968,7 @@ implements 
 
 getSplitQueueSize
-publicintgetSplitQueueSize()
+publicintgetSplitQueueSize()
 
 
 
@@ -977,7 +977,7 @@ implements 
 
 shouldSplitRegion
-privatebooleanshouldSplitRegion()
+privatebooleanshouldSplitRegion()
 
 
 
@@ -986,7 +986,7 @@ implements 
 
 getRegionSplitLimit
-publicintgetRegionSplitLimit()
+publicintgetRegionSplitLimit()
 
 Returns:
 the regionSplitLimit
@@ -999,7 +999,7 @@ implements 
 
 onConfigurationChange
-publicvoidonConfigurationChange(org.apache.hadoop.conf.ConfigurationnewConf)
+publicvoidonConfigurationChange(org.apache.hadoop.conf.ConfigurationnewConf)
 This method would be called by the ConfigurationManager
  object when the Configuration object is reloaded from disk.
 
@@ -1014,7 +1014,7 @@ implements 
 
 getSmallCompactionThreadNum
-protectedintgetSmallCompactionThreadNum()
+protectedintgetSmallCompactionThreadNum()
 
 
 
@@ -1023,7 +1023,7 @@ implements 
 
 getLargeCompactionThreadNum
-protectedintgetLargeCompactionThreadNum()
+protectedintgetLargeCompactionThreadNum()
 
 
 
@@ -1032,7 +1032,7 @@ implements 
 
 getSplitThreadNum
-protectedintgetSplitThreadNum()
+protectedintgetSplitThreadNum()
 
 
 
@@ -1041,7 +1041,7 @@ implements 
 
 registerChildren

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyColumnFamilyFuture.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyColumnFamilyFuture.html
 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyColumnFamilyFuture.html
index 262c208..adaa117 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyColumnFamilyFuture.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyColumnFamilyFuture.html
@@ -132,7 +132,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HBaseAdmin.ModifyColumnFamilyFuture
+private static class HBaseAdmin.ModifyColumnFamilyFuture
 extends HBaseAdmin.ModifyTableFuture
 
 
@@ -246,7 +246,7 @@ extends 
 
 ModifyColumnFamilyFuture
-publicModifyColumnFamilyFuture(HBaseAdminadmin,
+publicModifyColumnFamilyFuture(HBaseAdminadmin,
 TableNametableName,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponseresponse)
 
@@ -265,7 +265,7 @@ extends 
 
 getOperationType
-publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetOperationType()
+publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetOperationType()
 
 Overrides:
 getOperationTypein
 classHBaseAdmin.ModifyTableFuture

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
index 08df163..b669513 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.ModifyTableFuture.html
@@ -131,7 +131,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HBaseAdmin.ModifyTableFuture
+private static class HBaseAdmin.ModifyTableFuture
 extends HBaseAdmin.TableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
 
 
@@ -250,7 +250,7 @@ extends 
 
 ModifyTableFuture
-publicModifyTableFuture(HBaseAdminadmin,
+publicModifyTableFuture(HBaseAdminadmin,
  TableNametableName,
  
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponseresponse)
 
@@ -261,7 +261,7 @@ extends 
 
 ModifyTableFuture
-publicModifyTableFuture(HBaseAdminadmin,
+publicModifyTableFuture(HBaseAdminadmin,
  TableNametableName,
  http://docs.oracle.com/javase/8/docs/api/java/lang/Long.html?is-external=true;
 title="class or interface in java.lang">LongprocId)
 
@@ -280,7 +280,7 @@ extends 
 
 getOperationType
-publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetOperationType()
+publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetOperationType()
 
 Specified by:
 getOperationTypein
 classHBaseAdmin.TableFuturehttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Void
@@ -295,7 +295,7 @@ extends 
 
 postOperationResult
-protectedhttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">VoidpostOperationResult(http://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Voidresult,
+protectedhttp://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">VoidpostOperationResult(http://docs.oracle.com/javase/8/docs/api/java/lang/Void.html?is-external=true;
 title="class or interface in java.lang">Voidresult,
longdeadlineTs)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException,
http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/TimeoutException.html?is-external=true;
 title="class or interface in java.util.concurrent">TimeoutException

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/client/HBaseAdmin.NamespaceFuture.html
--
diff --git 

[23/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html 
b/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
index 9d0bd6f..afc9a5b 100644
--- a/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
+++ b/devapidocs/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
@@ -475,7 +475,7 @@ extends 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
 
 COMPRESSION_FAMILIES_CONF_KEY
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String COMPRESSION_FAMILIES_CONF_KEY
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String COMPRESSION_FAMILIES_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -488,7 +488,7 @@ extends 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
 
 BLOOM_TYPE_FAMILIES_CONF_KEY
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String BLOOM_TYPE_FAMILIES_CONF_KEY
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String BLOOM_TYPE_FAMILIES_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -501,7 +501,7 @@ extends 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
 
 BLOCK_SIZE_FAMILIES_CONF_KEY
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String BLOCK_SIZE_FAMILIES_CONF_KEY
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String BLOCK_SIZE_FAMILIES_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -514,7 +514,7 @@ extends 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
 
 DATABLOCK_ENCODING_FAMILIES_CONF_KEY
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String DATABLOCK_ENCODING_FAMILIES_CONF_KEY
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String DATABLOCK_ENCODING_FAMILIES_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -527,7 +527,7 @@ extends 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
 
 DATABLOCK_ENCODING_OVERRIDE_CONF_KEY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String DATABLOCK_ENCODING_OVERRIDE_CONF_KEY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String DATABLOCK_ENCODING_OVERRIDE_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -540,7 +540,7 @@ extends 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
 
 LOCALITY_SENSITIVE_CONF_KEY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String LOCALITY_SENSITIVE_CONF_KEY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String LOCALITY_SENSITIVE_CONF_KEY
 Keep locality while generating HFiles for bulkload. See 
HBASE-12596
 
 See Also:
@@ -554,7 +554,7 @@ extends 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
 
 DEFAULT_LOCALITY_SENSITIVE
-private static finalboolean DEFAULT_LOCALITY_SENSITIVE
+private static finalboolean DEFAULT_LOCALITY_SENSITIVE
 
 See Also:
 Constant
 Field Values
@@ -567,7 +567,7 @@ extends 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
 
 OUTPUT_TABLE_NAME_CONF_KEY
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String OUTPUT_TABLE_NAME_CONF_KEY
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String OUTPUT_TABLE_NAME_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -580,7 +580,7 @@ extends 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
 
 MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY
 
 See Also:
 Constant
 Field Values
@@ -593,7 +593,7 @@ extends 

[41/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
index 321a54e..6636989 100644
--- a/devapidocs/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class KeyValue.KeyOnlyKeyValue
+public static class KeyValue.KeyOnlyKeyValue
 extends KeyValue
 A simple form of KeyValue that creates a keyvalue with only 
the key part of the byte[]
  Mainly used in places where we need to compare two cells.  Avoids copying of 
bytes
@@ -394,7 +394,7 @@ extends KeyValue
-checkParameters,
 clone, 
create,
 create,
 createKeyOnly,
 deepClone,
 getBuffer,
 getDelimiter,
 getDelimiterInReverse,
 getFamilyLength, getKeyDataStructureSize,
 getKeyString,
 getKeyValueDataStructureSize,
 getKeyValueDataStructureSize,
 getKeyValueDataStructureSize,
 getLength,
 getOffset,
 getSequenceId,
 getSerializedSize, getTag,
 getTags, 
getTimestamp,
 humanReadableTimestamp,
 isLatestTimestamp,
 keyToString,
 keyToString,
 oswrite,
 setSequenceId,
 setTimestamp,
 setTimestamp,
 shallowCopy,
 toStringMap,
 updateLatestStamp,
 write,
 write,
 writeByteArray
+checkParameters,
 clone, 
create,
 create,
 createKeyOnly,
 deepClone,
 getBuffer,
 getDelimiter,
 getDelimiterInReverse,
 getFamilyLength, getKeyDataStructureSize,
 getKeyString,
 getKeyValueDataStructureSize,
 getKeyValueDataStructureSize,
 getKeyValueDataStructureSize,
 getLength,
 getOffset,
 getSequenceId,
 getSerializedSize, getTimestamp,
 humanReadableTimestamp,
 isLatestTimestamp,
 keyToString,
 keyToString,
 oswrite,
 setSequenceId,
 setTimestamp,
 set
 Timestamp, shallowCopy,
 toStringMap,
 updateLatestStamp,
 write,
 write,
 writeByteArray
 
 
 
@@ -415,7 +415,7 @@ extends RawCell
-checkForTagsLength,
 cloneTags
+checkForTagsLength,
 cloneTags,
 getTag,
 getTags
 
 
 
@@ -437,7 +437,7 @@ extends 
 
 rowLen
-privateshort rowLen
+privateshort rowLen
 
 
 
@@ -454,7 +454,7 @@ extends 
 
 KeyOnlyKeyValue
-publicKeyOnlyKeyValue()
+publicKeyOnlyKeyValue()
 
 
 
@@ -463,7 +463,7 @@ extends 
 
 KeyOnlyKeyValue
-publicKeyOnlyKeyValue(byte[]b)
+publicKeyOnlyKeyValue(byte[]b)
 
 
 
@@ -472,7 +472,7 @@ extends 
 
 KeyOnlyKeyValue
-publicKeyOnlyKeyValue(byte[]b,
+publicKeyOnlyKeyValue(byte[]b,
intoffset,
intlength)
 
@@ -491,7 +491,7 @@ extends 
 
 set
-publicvoidset(KeyValue.KeyOnlyKeyValuekeyOnlyKeyValue)
+publicvoidset(KeyValue.KeyOnlyKeyValuekeyOnlyKeyValue)
 
 
 
@@ -500,7 +500,7 @@ extends 
 
 clear
-publicvoidclear()
+publicvoidclear()
 
 
 
@@ -509,7 +509,7 @@ extends 
 
 getKeyOffset
-publicintgetKeyOffset()
+publicintgetKeyOffset()
 
 Overrides:
 getKeyOffsetin
 classKeyValue
@@ -524,7 +524,7 @@ extends 
 
 setKey
-publicvoidsetKey(byte[]key,
+publicvoidsetKey(byte[]key,
intoffset,
intlength)
 A setter that helps to avoid object creation every time and 
whenever
@@ -543,7 +543,7 @@ extends 
 
 getKey
-publicbyte[]getKey()
+publicbyte[]getKey()
 Description copied from 
class:KeyValue
 Do not use unless you have to. Used internally for 
compacting and testing. Use
  KeyValue.getRowArray(),
 KeyValue.getFamilyArray(),
 KeyValue.getQualifierArray(),
 and
@@ -562,7 +562,7 @@ extends 
 
 getRowArray
-publicbyte[]getRowArray()
+publicbyte[]getRowArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.
@@ -582,7 +582,7 @@ extends 
 
 getRowOffset
-publicintgetRowOffset()
+publicintgetRowOffset()
 
 Specified by:
 getRowOffsetin
 interfaceCell
@@ -599,7 +599,7 @@ extends 
 
 getFamilyArray
-publicbyte[]getFamilyArray()
+publicbyte[]getFamilyArray()
 Description copied from 
interface:Cell
 Contiguous bytes composed of legal HDFS filename characters 
which may start at any index in the
  containing array. Max length is Byte.MAX_VALUE, which is 127 bytes.
@@ -619,7 +619,7 @@ extends 
 
 getFamilyLength
-publicbytegetFamilyLength()
+publicbytegetFamilyLength()
 
 Specified by:
 getFamilyLengthin
 interfaceCell
@@ -636,7 +636,7 @@ extends 
 
 getFamilyOffset
-publicintgetFamilyOffset()
+publicintgetFamilyOffset()
 
 Specified by:
 getFamilyOffsetin
 interfaceCell
@@ -653,7 +653,7 @@ extends 
 
 getQualifierArray
-publicbyte[]getQualifierArray()
+publicbyte[]getQualifierArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array.
 
@@ -672,7 +672,7 @@ extends 
 
 getQualifierLength
-publicintgetQualifierLength()
+publicintgetQualifierLength()
 
 Specified by:
 getQualifierLengthin
 

[16/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.MetaComparator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.MetaComparator.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.MetaComparator.html
index f8eace7..66b6656 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.MetaComparator.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.MetaComparator.html
@@ -27,2569 +27,2540 @@
 019 */
 020package org.apache.hadoop.hbase;
 021
-022import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-023import static 
org.apache.hadoop.hbase.util.Bytes.len;
-024
-025import java.io.DataInput;
-026import java.io.DataOutput;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.nio.ByteBuffer;
-030import java.util.ArrayList;
-031import java.util.Arrays;
-032import java.util.HashMap;
-033import java.util.Iterator;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Optional;
-037
-038import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-039import 
org.apache.hadoop.hbase.util.Bytes;
-040import 
org.apache.hadoop.hbase.util.ClassSize;
-041import 
org.apache.hadoop.io.RawComparator;
-042import 
org.apache.yetus.audience.InterfaceAudience;
-043import org.slf4j.Logger;
-044import org.slf4j.LoggerFactory;
-045
-046import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-047
-048/**
-049 * An HBase Key/Value. This is the 
fundamental HBase Type.
+022import static 
org.apache.hadoop.hbase.util.Bytes.len;
+023
+024import java.io.DataInput;
+025import java.io.DataOutput;
+026import java.io.IOException;
+027import java.io.OutputStream;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Arrays;
+031import java.util.HashMap;
+032import java.util.Iterator;
+033import java.util.List;
+034import java.util.Map;
+035import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+036import 
org.apache.hadoop.hbase.util.Bytes;
+037import 
org.apache.hadoop.hbase.util.ClassSize;
+038import 
org.apache.hadoop.io.RawComparator;
+039import 
org.apache.yetus.audience.InterfaceAudience;
+040import org.slf4j.Logger;
+041import org.slf4j.LoggerFactory;
+042
+043import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+044
+045/**
+046 * An HBase Key/Value. This is the 
fundamental HBase Type.
+047 * p
+048 * HBase applications and users should 
use the Cell interface and avoid directly using KeyValue and
+049 * member functions not defined in 
Cell.
 050 * p
-051 * HBase applications and users should 
use the Cell interface and avoid directly using KeyValue and
-052 * member functions not defined in 
Cell.
-053 * p
-054 * If being used client-side, the primary 
methods to access individual fields are
-055 * {@link #getRowArray()}, {@link 
#getFamilyArray()}, {@link #getQualifierArray()},
-056 * {@link #getTimestamp()}, and {@link 
#getValueArray()}. These methods allocate new byte arrays
-057 * and return copies. Avoid their use 
server-side.
-058 * p
-059 * Instances of this class are immutable. 
They do not implement Comparable but Comparators are
-060 * provided. Comparators change with 
context, whether user table or a catalog table comparison. Its
-061 * critical you use the appropriate 
comparator. There are Comparators for normal HFiles, Meta's
-062 * Hfiles, and bloom filter keys.
-063 * p
-064 * KeyValue wraps a byte array and takes 
offsets and lengths into passed array at where to start
-065 * interpreting the content as KeyValue. 
The KeyValue format inside a byte array is:
-066 * codelt;keylengthgt; 
lt;valuelengthgt; lt;keygt; 
lt;valuegt;/code Key is further
-067 * decomposed as: 
codelt;rowlengthgt; lt;rowgt; 
lt;columnfamilylengthgt;
-068 * lt;columnfamilygt; 
lt;columnqualifiergt;
-069 * lt;timestampgt; 
lt;keytypegt;/code The coderowlength/code 
maximum is
-070 * 
codeShort.MAX_SIZE/code, column family length maximum is 
codeByte.MAX_SIZE/code, and
-071 * column qualifier + key length must be 
lt; codeInteger.MAX_SIZE/code. The column does not
-072 * contain the family/qualifier 
delimiter, {@link #COLUMN_FAMILY_DELIMITER}br
-073 * KeyValue can optionally contain Tags. 
When it contains tags, it is added in the byte array after
-074 * the value part. The format for this 
part is: 
codelt;tagslengthgt;lt;tagsbytesgt;/code.
-075 * codetagslength/code 
maximum is codeShort.MAX_SIZE/code. The 
codetagsbytes/code
-076 * contain one or more tags where as each 
tag is of the form
-077 * 
codelt;taglengthgt;lt;tagtypegt;lt;tagbytesgt;/code.
 codetagtype/code is one byte
-078 * and codetaglength/code 
maximum is codeShort.MAX_SIZE/code and it includes 1 byte 
type
-079 * length and actual tag bytes length.
-080 */
-081@InterfaceAudience.Private
-082public class KeyValue implements 
ExtendedCell {
-083  

[13/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.html
index f8eace7..66b6656 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.html
@@ -27,2569 +27,2540 @@
 019 */
 020package org.apache.hadoop.hbase;
 021
-022import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-023import static 
org.apache.hadoop.hbase.util.Bytes.len;
-024
-025import java.io.DataInput;
-026import java.io.DataOutput;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.nio.ByteBuffer;
-030import java.util.ArrayList;
-031import java.util.Arrays;
-032import java.util.HashMap;
-033import java.util.Iterator;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Optional;
-037
-038import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-039import 
org.apache.hadoop.hbase.util.Bytes;
-040import 
org.apache.hadoop.hbase.util.ClassSize;
-041import 
org.apache.hadoop.io.RawComparator;
-042import 
org.apache.yetus.audience.InterfaceAudience;
-043import org.slf4j.Logger;
-044import org.slf4j.LoggerFactory;
-045
-046import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-047
-048/**
-049 * An HBase Key/Value. This is the 
fundamental HBase Type.
+022import static 
org.apache.hadoop.hbase.util.Bytes.len;
+023
+024import java.io.DataInput;
+025import java.io.DataOutput;
+026import java.io.IOException;
+027import java.io.OutputStream;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Arrays;
+031import java.util.HashMap;
+032import java.util.Iterator;
+033import java.util.List;
+034import java.util.Map;
+035import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+036import 
org.apache.hadoop.hbase.util.Bytes;
+037import 
org.apache.hadoop.hbase.util.ClassSize;
+038import 
org.apache.hadoop.io.RawComparator;
+039import 
org.apache.yetus.audience.InterfaceAudience;
+040import org.slf4j.Logger;
+041import org.slf4j.LoggerFactory;
+042
+043import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+044
+045/**
+046 * An HBase Key/Value. This is the 
fundamental HBase Type.
+047 * p
+048 * HBase applications and users should 
use the Cell interface and avoid directly using KeyValue and
+049 * member functions not defined in 
Cell.
 050 * p
-051 * HBase applications and users should 
use the Cell interface and avoid directly using KeyValue and
-052 * member functions not defined in 
Cell.
-053 * p
-054 * If being used client-side, the primary 
methods to access individual fields are
-055 * {@link #getRowArray()}, {@link 
#getFamilyArray()}, {@link #getQualifierArray()},
-056 * {@link #getTimestamp()}, and {@link 
#getValueArray()}. These methods allocate new byte arrays
-057 * and return copies. Avoid their use 
server-side.
-058 * p
-059 * Instances of this class are immutable. 
They do not implement Comparable but Comparators are
-060 * provided. Comparators change with 
context, whether user table or a catalog table comparison. Its
-061 * critical you use the appropriate 
comparator. There are Comparators for normal HFiles, Meta's
-062 * Hfiles, and bloom filter keys.
-063 * p
-064 * KeyValue wraps a byte array and takes 
offsets and lengths into passed array at where to start
-065 * interpreting the content as KeyValue. 
The KeyValue format inside a byte array is:
-066 * codelt;keylengthgt; 
lt;valuelengthgt; lt;keygt; 
lt;valuegt;/code Key is further
-067 * decomposed as: 
codelt;rowlengthgt; lt;rowgt; 
lt;columnfamilylengthgt;
-068 * lt;columnfamilygt; 
lt;columnqualifiergt;
-069 * lt;timestampgt; 
lt;keytypegt;/code The coderowlength/code 
maximum is
-070 * 
codeShort.MAX_SIZE/code, column family length maximum is 
codeByte.MAX_SIZE/code, and
-071 * column qualifier + key length must be 
lt; codeInteger.MAX_SIZE/code. The column does not
-072 * contain the family/qualifier 
delimiter, {@link #COLUMN_FAMILY_DELIMITER}br
-073 * KeyValue can optionally contain Tags. 
When it contains tags, it is added in the byte array after
-074 * the value part. The format for this 
part is: 
codelt;tagslengthgt;lt;tagsbytesgt;/code.
-075 * codetagslength/code 
maximum is codeShort.MAX_SIZE/code. The 
codetagsbytes/code
-076 * contain one or more tags where as each 
tag is of the form
-077 * 
codelt;taglengthgt;lt;tagtypegt;lt;tagbytesgt;/code.
 codetagtype/code is one byte
-078 * and codetaglength/code 
maximum is codeShort.MAX_SIZE/code and it includes 1 byte 
type
-079 * length and actual tag bytes length.
-080 */
-081@InterfaceAudience.Private
-082public class KeyValue implements 
ExtendedCell {
-083  private static final 
ArrayListTag EMPTY_ARRAY_LIST = new ArrayList();
-084

[20/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/ByteBufferKeyValue.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ByteBufferKeyValue.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ByteBufferKeyValue.html
index 8112448..71d0648 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ByteBufferKeyValue.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ByteBufferKeyValue.html
@@ -25,367 +25,333 @@
 017 */
 018package org.apache.hadoop.hbase;
 019
-020import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-021
-022import java.io.IOException;
-023import java.io.OutputStream;
-024import java.nio.ByteBuffer;
-025import java.util.ArrayList;
-026import java.util.Iterator;
-027import java.util.List;
-028import java.util.Optional;
+020import java.io.IOException;
+021import java.io.OutputStream;
+022import java.nio.ByteBuffer;
+023import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+024import 
org.apache.hadoop.hbase.util.Bytes;
+025import 
org.apache.hadoop.hbase.util.ClassSize;
+026import 
org.apache.yetus.audience.InterfaceAudience;
+027
+028import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 029
-030import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-031import 
org.apache.hadoop.hbase.util.Bytes;
-032import 
org.apache.hadoop.hbase.util.ClassSize;
-033import 
org.apache.yetus.audience.InterfaceAudience;
-034
-035import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+030/**
+031 * This Cell is an implementation of 
{@link ByteBufferCell} where the data resides in
+032 * off heap/ on heap ByteBuffer
+033 */
+034@InterfaceAudience.Private
+035public class ByteBufferKeyValue extends 
ByteBufferCell implements ExtendedCell {
 036
-037/**
-038 * This Cell is an implementation of 
{@link ByteBufferCell} where the data resides in
-039 * off heap/ on heap ByteBuffer
-040 */
-041@InterfaceAudience.Private
-042public class ByteBufferKeyValue extends 
ByteBufferCell implements ExtendedCell {
-043
-044  protected final ByteBuffer buf;
-045  protected final int offset;
-046  protected final int length;
-047  private long seqId = 0;
-048
-049  public static final int FIXED_OVERHEAD 
= ClassSize.OBJECT + ClassSize.REFERENCE
-050  + (2 * Bytes.SIZEOF_INT) + 
Bytes.SIZEOF_LONG;
+037  protected final ByteBuffer buf;
+038  protected final int offset;
+039  protected final int length;
+040  private long seqId = 0;
+041
+042  public static final int FIXED_OVERHEAD 
= ClassSize.OBJECT + ClassSize.REFERENCE
+043  + (2 * Bytes.SIZEOF_INT) + 
Bytes.SIZEOF_LONG;
+044
+045  public ByteBufferKeyValue(ByteBuffer 
buf, int offset, int length, long seqId) {
+046this.buf = buf;
+047this.offset = offset;
+048this.length = length;
+049this.seqId = seqId;
+050  }
 051
-052  public ByteBufferKeyValue(ByteBuffer 
buf, int offset, int length, long seqId) {
+052  public ByteBufferKeyValue(ByteBuffer 
buf, int offset, int length) {
 053this.buf = buf;
 054this.offset = offset;
 055this.length = length;
-056this.seqId = seqId;
-057  }
-058
-059  public ByteBufferKeyValue(ByteBuffer 
buf, int offset, int length) {
-060this.buf = buf;
-061this.offset = offset;
-062this.length = length;
-063  }
-064
-065  @VisibleForTesting
-066  public ByteBuffer getBuffer() {
-067return this.buf;
-068  }
-069
-070  @VisibleForTesting
-071  public int getOffset() {
-072return this.offset;
-073  }
-074
-075  @Override
-076  public byte[] getRowArray() {
-077return CellUtil.cloneRow(this);
-078  }
-079
-080  @Override
-081  public int getRowOffset() {
-082return 0;
-083  }
-084
-085  @Override
-086  public short getRowLength() {
-087return getRowLen();
-088  }
-089
-090  private short getRowLen() {
-091return 
ByteBufferUtils.toShort(this.buf, this.offset + KeyValue.ROW_OFFSET);
-092  }
-093
-094  @Override
-095  public byte[] getFamilyArray() {
-096return CellUtil.cloneFamily(this);
-097  }
-098
-099  @Override
-100  public int getFamilyOffset() {
-101return 0;
-102  }
-103
-104  @Override
-105  public byte getFamilyLength() {
-106return 
getFamilyLength(getFamilyLengthPosition());
-107  }
-108
-109  private int getFamilyLengthPosition() 
{
-110return this.offset + 
KeyValue.ROW_KEY_OFFSET
-111+ getRowLen();
-112  }
-113
-114  private byte getFamilyLength(int 
famLenPos) {
-115return 
ByteBufferUtils.toByte(this.buf, famLenPos);
-116  }
-117
-118  @Override
-119  public byte[] getQualifierArray() {
-120return 
CellUtil.cloneQualifier(this);
-121  }
-122
-123  @Override
-124  public int getQualifierOffset() {
-125return 0;
-126  }
-127
-128  @Override
-129  public int getQualifierLength() {
-130return 
getQualifierLength(getRowLength(), getFamilyLength());
-131  }
-132
-133  private int getQualifierLength(int 
rlength, int 

[15/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
index f8eace7..66b6656 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.SamePrefixComparator.html
@@ -27,2569 +27,2540 @@
 019 */
 020package org.apache.hadoop.hbase;
 021
-022import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-023import static 
org.apache.hadoop.hbase.util.Bytes.len;
-024
-025import java.io.DataInput;
-026import java.io.DataOutput;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.nio.ByteBuffer;
-030import java.util.ArrayList;
-031import java.util.Arrays;
-032import java.util.HashMap;
-033import java.util.Iterator;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Optional;
-037
-038import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-039import 
org.apache.hadoop.hbase.util.Bytes;
-040import 
org.apache.hadoop.hbase.util.ClassSize;
-041import 
org.apache.hadoop.io.RawComparator;
-042import 
org.apache.yetus.audience.InterfaceAudience;
-043import org.slf4j.Logger;
-044import org.slf4j.LoggerFactory;
-045
-046import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-047
-048/**
-049 * An HBase Key/Value. This is the 
fundamental HBase Type.
+022import static 
org.apache.hadoop.hbase.util.Bytes.len;
+023
+024import java.io.DataInput;
+025import java.io.DataOutput;
+026import java.io.IOException;
+027import java.io.OutputStream;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Arrays;
+031import java.util.HashMap;
+032import java.util.Iterator;
+033import java.util.List;
+034import java.util.Map;
+035import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+036import 
org.apache.hadoop.hbase.util.Bytes;
+037import 
org.apache.hadoop.hbase.util.ClassSize;
+038import 
org.apache.hadoop.io.RawComparator;
+039import 
org.apache.yetus.audience.InterfaceAudience;
+040import org.slf4j.Logger;
+041import org.slf4j.LoggerFactory;
+042
+043import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+044
+045/**
+046 * An HBase Key/Value. This is the 
fundamental HBase Type.
+047 * p
+048 * HBase applications and users should 
use the Cell interface and avoid directly using KeyValue and
+049 * member functions not defined in 
Cell.
 050 * p
-051 * HBase applications and users should 
use the Cell interface and avoid directly using KeyValue and
-052 * member functions not defined in 
Cell.
-053 * p
-054 * If being used client-side, the primary 
methods to access individual fields are
-055 * {@link #getRowArray()}, {@link 
#getFamilyArray()}, {@link #getQualifierArray()},
-056 * {@link #getTimestamp()}, and {@link 
#getValueArray()}. These methods allocate new byte arrays
-057 * and return copies. Avoid their use 
server-side.
-058 * p
-059 * Instances of this class are immutable. 
They do not implement Comparable but Comparators are
-060 * provided. Comparators change with 
context, whether user table or a catalog table comparison. Its
-061 * critical you use the appropriate 
comparator. There are Comparators for normal HFiles, Meta's
-062 * Hfiles, and bloom filter keys.
-063 * p
-064 * KeyValue wraps a byte array and takes 
offsets and lengths into passed array at where to start
-065 * interpreting the content as KeyValue. 
The KeyValue format inside a byte array is:
-066 * codelt;keylengthgt; 
lt;valuelengthgt; lt;keygt; 
lt;valuegt;/code Key is further
-067 * decomposed as: 
codelt;rowlengthgt; lt;rowgt; 
lt;columnfamilylengthgt;
-068 * lt;columnfamilygt; 
lt;columnqualifiergt;
-069 * lt;timestampgt; 
lt;keytypegt;/code The coderowlength/code 
maximum is
-070 * 
codeShort.MAX_SIZE/code, column family length maximum is 
codeByte.MAX_SIZE/code, and
-071 * column qualifier + key length must be 
lt; codeInteger.MAX_SIZE/code. The column does not
-072 * contain the family/qualifier 
delimiter, {@link #COLUMN_FAMILY_DELIMITER}br
-073 * KeyValue can optionally contain Tags. 
When it contains tags, it is added in the byte array after
-074 * the value part. The format for this 
part is: 
codelt;tagslengthgt;lt;tagsbytesgt;/code.
-075 * codetagslength/code 
maximum is codeShort.MAX_SIZE/code. The 
codetagsbytes/code
-076 * contain one or more tags where as each 
tag is of the form
-077 * 
codelt;taglengthgt;lt;tagtypegt;lt;tagbytesgt;/code.
 codetagtype/code is one byte
-078 * and codetaglength/code 
maximum is codeShort.MAX_SIZE/code and it includes 1 byte 
type
-079 * length and actual tag bytes length.
-080 */
-081@InterfaceAudience.Private
-082public class KeyValue 

[26/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
index a2c58d5..41b74b8 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/encoding/BufferedDataBlockEncoder.OffheapDecodedCell.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-protected static class BufferedDataBlockEncoder.OffheapDecodedCell
+protected static class BufferedDataBlockEncoder.OffheapDecodedCell
 extends ByteBufferCell
 implements ExtendedCell
 
@@ -368,101 +368,89 @@ implements getSerializedSize(booleanwithTags)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalTag
-getTag(bytetype)
-Returns the specific tag of the given type
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTag
-getTags()
-Creates a list of tags in the current cell
-
-
-
 byte[]
 getTagsArray()
 Contiguous raw bytes representing tags that may start at 
any index in the containing array.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
 getTagsByteBuffer()
 
-
+
 int
 getTagsLength()
 HBase internally uses 2 bytes to store tags length in 
Cell.
 
 
-
+
 int
 getTagsOffset()
 
-
+
 int
 getTagsPosition()
 
-
+
 long
 getTimestamp()
 
-
+
 byte
 getTypeByte()
 
-
+
 byte[]
 getValueArray()
 Contiguous raw bytes that may start at any index in the 
containing array.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
 getValueByteBuffer()
 
-
+
 int
 getValueLength()
 
-
+
 int
 getValueOffset()
 
-
+
 int
 getValuePosition()
 
-
+
 long
 heapSize()
 
-
+
 void
 setSequenceId(longseqId)
 Sets with the given seqId.
 
 
-
+
 void
 setTimestamp(byte[]ts)
 Sets with the given timestamp.
 
 
-
+
 void
 setTimestamp(longts)
 Sets with the given timestamp.
 
 
-
+
 void
 write(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBufferbuf,
  intoffset)
 Write this Cell into the given buf's offset in a KeyValue format.
 
 
-
+
 int
 write(http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true;
 title="class or interface in java.io">OutputStreamout,
  booleanwithTags)
@@ -489,7 +477,7 @@ implements 
 
 Methods inherited from interfaceorg.apache.hadoop.hbase.RawCell
-checkForTagsLength,
 cloneTags
+checkForTagsLength,
 cloneTags,
 getTag,
 getTags
 
 
 
@@ -511,7 +499,7 @@ implements 
 
 FIXED_OVERHEAD
-private static finallong FIXED_OVERHEAD
+private static finallong FIXED_OVERHEAD
 
 
 
@@ -520,7 +508,7 @@ implements 
 
 keyBuffer
-privatehttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer keyBuffer
+privatehttp://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer keyBuffer
 
 
 
@@ -529,7 +517,7 @@ implements 
 
 rowLength
-privateshort rowLength
+privateshort rowLength
 
 
 
@@ -538,7 +526,7 @@ implements 
 
 familyOffset
-privateint familyOffset
+privateint familyOffset
 
 
 
@@ -547,7 +535,7 @@ implements 
 
 familyLength
-privatebyte familyLength
+privatebyte familyLength
 
 
 
@@ -556,7 +544,7 @@ implements 
 
 qualifierOffset
-privateint qualifierOffset
+privateint qualifierOffset
 
 
 
@@ -565,7 +553,7 @@ implements 
 
 qualifierLength
-privateint qualifierLength
+privateint 

[10/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
index 3400507..2baa140 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowByteBufferCell.html
@@ -28,3034 +28,2926 @@
 020import static 
org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import 
com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import 
org.apache.hadoop.hbase.KeyValue.Type;
-037import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-040import 
org.apache.hadoop.hbase.io.util.Dictionary;
-041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-042import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import 
org.apache.hadoop.hbase.util.ByteRange;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045import 
org.apache.hadoop.hbase.util.ClassSize;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
-051 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this 
class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import 
org.apache.hadoop.hbase.KeyValue.Type;
+034import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import 
org.apache.hadoop.hbase.io.HeapSize;
+036import 
org.apache.hadoop.hbase.io.TagCompressionContext;
+037import 
org.apache.hadoop.hbase.io.util.Dictionary;
+038import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+039import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import 
org.apache.hadoop.hbase.util.ByteRange;
+041import 
org.apache.hadoop.hbase.util.Bytes;
+042import 
org.apache.hadoop.hbase.util.ClassSize;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
+049 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this 
class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange 
***/
 061
-062  /*** ByteRange 
***/
-063
-064  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-065return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-069return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-073return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074  cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-078return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-079  }
-080
-081  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-082return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
+063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
+067return 

[31/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/SizeCachedNoTagsKeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/SizeCachedNoTagsKeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/SizeCachedNoTagsKeyValue.html
index 4400c9e..2b95af0 100644
--- a/devapidocs/org/apache/hadoop/hbase/SizeCachedNoTagsKeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/SizeCachedNoTagsKeyValue.html
@@ -248,7 +248,7 @@ extends 
 
 Methods inherited from classorg.apache.hadoop.hbase.KeyValue
-checkParameters,
 clone, 
create,
 create,
 createKeyOnly,
 deepClone,
 equals,
 getBuffer,
 getDelimiter,
 getDelimiterInReverse, getFamilyArray,
 getFamilyLength,
 getFamilyLength,
 getFamilyOffset,
 getKey, 
getKeyDataStructureSize,
 getKeyOffset,
 getKeyString,
 getKeyValueDataStructureSize,
 getKeyValueDataStructureSize,
 getKeyValueDataStructureSize,
 getLength,
 getOffset,
 getQualifierArray,
 getQualifierLength,
 getQualifierOffset,
 getRowArray,
 getRowOffset,
 getSeque
 nceId, getTag,
 getTags, 
getTagsArray,
 getTagsOffset,
 getTimestamp,
 getTimestamp,
 getTimestampOffset,
 getTypeByte,
 getValueArray,
 getValueLength,
 getValueOffset, hashCode,
 humanReadableTimestamp,
 isLatestTimestamp,
 keyToString,
 keyToString,
 oswrite,
 setSequenceId,
 setTimestamp,
 setTimestamp,
 shallowCopy,
 toString,
 toStringMap,
 updateLatestStamp,
 write,
 write,
 writeByteArray
+checkParameters,
 clone, 
create,
 create,
 createKeyOnly,
 deepClone,
 equals,
 getBuffer,
 getDelimiter,
 getDelimiterInReverse, getFamilyArray,
 getFamilyLength,
 getFamilyLength,
 getFamilyOffset,
 getKey, 
getKeyDataStructureSize,
 getKeyOffset,
 getKeyString,
 getKeyValueDataStructureSize,
 getKeyValueDataStructureSize,
 getKeyValueDataStructureSize,
 getLength,
 getOffset,
 getQualifierArray,
 getQualifierLength,
 getQualifierOffset,
 getRowArray,
 getRowOffset,
 getSeque
 nceId, getTagsArray,
 getTagsOffset,
 getTimestamp,
 getTimestamp,
 getTimestampOffset,
 getTypeByte,
 getValueArray,
 getValueLength,
 getValueOffset,
 hashCode,
 humanReadableTimestamp, 
isLatestTimestamp,
 keyToString,
 keyToString,
 oswrite,
 setSequenceId,
 setTimestamp,
 setTimestamp,
 shallowCopy,
 toString,
  toStringMap,
 updateLatestStamp,
 write,
 write,
 writeByteArray
 
 
 
@@ -269,7 +269,7 @@ extends 
 
 Methods inherited from interfaceorg.apache.hadoop.hbase.RawCell
-checkForTagsLength,
 cloneTags
+checkForTagsLength,
 cloneTags,
 getTag,
 getTags
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 7d49754..d7fec8e 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -167,10 +167,10 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase
 org.apache.hadoop.hbase.backup.BackupType
-org.apache.hadoop.hbase.backup.BackupInfo.BackupState
 org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand
+org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase
+org.apache.hadoop.hbase.backup.BackupInfo.BackupState
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/class-use/RegionMetrics.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/RegionMetrics.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/RegionMetrics.html
index 868db79..e224d94 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/RegionMetrics.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/RegionMetrics.html
@@ -235,7 +235,7 @@
 
 
 
-ServerMetricsImpl(ServerNameserverName,
+ServerMetricsImpl(ServerNameserverName,
  longrequestCountPerSecond,
  longrequestCount,
  SizeusedHeapSize,
@@ -244,7 +244,7 @@
  http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationLoadSourcesources,
  ReplicationLoadSinksink,
  http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in 

[50/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/apidocs/org/apache/hadoop/hbase/ServerMetrics.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/ServerMetrics.html 
b/apidocs/org/apache/hadoop/hbase/ServerMetrics.html
index 170d5af..3650a61 100644
--- a/apidocs/org/apache/hadoop/hbase/ServerMetrics.html
+++ b/apidocs/org/apache/hadoop/hbase/ServerMetrics.html
@@ -106,7 +106,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public interface ServerMetrics
+public interface ServerMetrics
 This class is used for exporting current state of load on a 
RegionServer.
 
 
@@ -127,7 +127,7 @@ public interface Method and Description
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 getCoprocessorNames()
 Return the RegionServer-level and Region-level 
coprocessors
 
@@ -201,7 +201,7 @@ public interface 
 
 getServerName
-ServerNamegetServerName()
+ServerNamegetServerName()
 
 
 
@@ -210,7 +210,7 @@ public interface 
 
 getRequestCountPerSecond
-longgetRequestCountPerSecond()
+longgetRequestCountPerSecond()
 
 Returns:
 the number of requests per second.
@@ -223,7 +223,7 @@ public interface 
 
 getRequestCount
-longgetRequestCount()
+longgetRequestCount()
 
 Returns:
 total Number of requests from the start of the region server.
@@ -236,7 +236,7 @@ public interface 
 
 getUsedHeapSize
-SizegetUsedHeapSize()
+SizegetUsedHeapSize()
 
 Returns:
 the amount of used heap
@@ -249,7 +249,7 @@ public interface 
 
 getMaxHeapSize
-SizegetMaxHeapSize()
+SizegetMaxHeapSize()
 
 Returns:
 the maximum allowable size of the heap
@@ -262,7 +262,7 @@ public interface 
 
 getInfoServerPort
-intgetInfoServerPort()
+intgetInfoServerPort()
 
 
 
@@ -271,7 +271,7 @@ public interface 
 
 getReplicationLoadSourceList
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationLoadSourcegetReplicationLoadSourceList()
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListReplicationLoadSourcegetReplicationLoadSourceList()
 Call directly from client such as hbase shell
 
 Returns:
@@ -286,7 +286,7 @@ public interface 
 getReplicationLoadSink
 @Nullable
-ReplicationLoadSinkgetReplicationLoadSink()
+ReplicationLoadSinkgetReplicationLoadSink()
 Call directly from client such as hbase shell
 
 Returns:
@@ -300,7 +300,7 @@ public interface 
 
 getRegionMetrics
-http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],RegionMetricsgetRegionMetrics()
+http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Mapbyte[],RegionMetricsgetRegionMetrics()
 
 Returns:
 region load metrics
@@ -313,11 +313,11 @@ public interface 
 
 getCoprocessorNames
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetCoprocessorNames()
+http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Sethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetCoprocessorNames()
 Return the RegionServer-level and Region-level 
coprocessors
 
 Returns:
-string list of loaded RegionServer-level and Region-level coprocessors
+string set of loaded RegionServer-level and Region-level coprocessors
 
 
 
@@ -327,7 +327,7 @@ public interface 
 
 getReportTimestamp
-longgetReportTimestamp()
+longgetReportTimestamp()
 
 Returns:
 the timestamp (server side) of generating this metrics
@@ -340,7 +340,7 @@ public interface 
 
 getLastReportTimestamp
-longgetLastReportTimestamp()
+longgetLastReportTimestamp()
 
 Returns:
 the last timestamp (server side) of generating this metrics

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/apidocs/org/apache/hadoop/hbase/client/Mutation.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Mutation.html 
b/apidocs/org/apache/hadoop/hbase/client/Mutation.html
index c71bd38..46e55f2 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Mutation.html
+++ 

[19/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/IndividualBytesFieldCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/IndividualBytesFieldCell.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/IndividualBytesFieldCell.html
index 242f82e..d6a8ea3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/IndividualBytesFieldCell.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/IndividualBytesFieldCell.html
@@ -26,319 +26,286 @@
 018
 019package org.apache.hadoop.hbase;
 020
-021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-022
-023import java.util.ArrayList;
-024import java.util.Iterator;
-025import java.util.List;
-026import java.util.Optional;
-027
-028import 
org.apache.commons.lang3.ArrayUtils;
-029import 
org.apache.hadoop.hbase.util.Bytes;
-030import 
org.apache.hadoop.hbase.util.ClassSize;
-031import 
org.apache.yetus.audience.InterfaceAudience;
-032
-033@InterfaceAudience.Private
-034public class IndividualBytesFieldCell 
implements ExtendedCell {
-035
-036  private static final long 
FIXED_OVERHEAD = ClassSize.align(  // do alignment(padding gap)
-037ClassSize.OBJECT  // 
object header
-038  + KeyValue.TIMESTAMP_TYPE_SIZE  // 
timestamp and type
-039  + Bytes.SIZEOF_LONG // 
sequence id
-040  + 5 * ClassSize.REFERENCE); // 
references to all byte arrays: row, family, qualifier, value, tags
-041
-042  // The following fields are backed by 
individual byte arrays
-043  private final byte[] row;
-044  private final int rOffset;
-045  private final int rLength;
-046  private final byte[] family;
-047  private final int fOffset;
-048  private final int fLength;
-049  private final byte[] qualifier;
-050  private final int qOffset;
-051  private final int qLength;
-052  private final byte[] value;
-053  private final int vOffset;
-054  private final int vLength;
-055  private final byte[] tags;  // A byte 
array, rather than an array of org.apache.hadoop.hbase.Tag
-056  private final int tagsOffset;
-057  private final int tagsLength;
-058
-059  // Other fields
-060  private long timestamp;
-061  private final byte type;  // A byte, 
rather than org.apache.hadoop.hbase.KeyValue.Type
-062  private long seqId;
-063
-064  public IndividualBytesFieldCell(byte[] 
row, byte[] family, byte[] qualifier,
-065  long 
timestamp, KeyValue.Type type,  byte[] value) {
-066this(row, family, qualifier, 
timestamp, type, 0L /* sequence id */, value, null /* tags */);
-067  }
-068
-069  public IndividualBytesFieldCell(byte[] 
row, byte[] family, byte[] qualifier,
-070  long 
timestamp, KeyValue.Type type, long seqId, byte[] value, byte[] tags) {
-071this(row, 0, 
ArrayUtils.getLength(row),
-072family, 0, 
ArrayUtils.getLength(family),
-073qualifier, 0, 
ArrayUtils.getLength(qualifier),
-074timestamp, type, seqId,
-075value, 0, 
ArrayUtils.getLength(value),
-076tags, 0, 
ArrayUtils.getLength(tags));
-077  }
+021import 
org.apache.commons.lang3.ArrayUtils;
+022import 
org.apache.hadoop.hbase.util.Bytes;
+023import 
org.apache.hadoop.hbase.util.ClassSize;
+024import 
org.apache.yetus.audience.InterfaceAudience;
+025
+026@InterfaceAudience.Private
+027public class IndividualBytesFieldCell 
implements ExtendedCell {
+028
+029  private static final long 
FIXED_OVERHEAD = ClassSize.align(  // do alignment(padding gap)
+030ClassSize.OBJECT  // 
object header
+031  + KeyValue.TIMESTAMP_TYPE_SIZE  // 
timestamp and type
+032  + Bytes.SIZEOF_LONG // 
sequence id
+033  + 5 * ClassSize.REFERENCE); // 
references to all byte arrays: row, family, qualifier, value, tags
+034
+035  // The following fields are backed by 
individual byte arrays
+036  private final byte[] row;
+037  private final int rOffset;
+038  private final int rLength;
+039  private final byte[] family;
+040  private final int fOffset;
+041  private final int fLength;
+042  private final byte[] qualifier;
+043  private final int qOffset;
+044  private final int qLength;
+045  private final byte[] value;
+046  private final int vOffset;
+047  private final int vLength;
+048  private final byte[] tags;  // A byte 
array, rather than an array of org.apache.hadoop.hbase.Tag
+049  private final int tagsOffset;
+050  private final int tagsLength;
+051
+052  // Other fields
+053  private long timestamp;
+054  private final byte type;  // A byte, 
rather than org.apache.hadoop.hbase.KeyValue.Type
+055  private long seqId;
+056
+057  public IndividualBytesFieldCell(byte[] 
row, byte[] family, byte[] qualifier,
+058  long 
timestamp, KeyValue.Type type,  byte[] value) {
+059this(row, family, qualifier, 
timestamp, type, 0L /* sequence id */, value, 

[07/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColCell.html
index 3400507..2baa140 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColCell.html
@@ -28,3034 +28,2926 @@
 020import static 
org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import 
com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import 
org.apache.hadoop.hbase.KeyValue.Type;
-037import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-040import 
org.apache.hadoop.hbase.io.util.Dictionary;
-041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-042import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import 
org.apache.hadoop.hbase.util.ByteRange;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045import 
org.apache.hadoop.hbase.util.ClassSize;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
-051 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this 
class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import 
org.apache.hadoop.hbase.KeyValue.Type;
+034import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import 
org.apache.hadoop.hbase.io.HeapSize;
+036import 
org.apache.hadoop.hbase.io.TagCompressionContext;
+037import 
org.apache.hadoop.hbase.io.util.Dictionary;
+038import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+039import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import 
org.apache.hadoop.hbase.util.ByteRange;
+041import 
org.apache.hadoop.hbase.util.Bytes;
+042import 
org.apache.hadoop.hbase.util.ClassSize;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
+049 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this 
class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange 
***/
 061
-062  /*** ByteRange 
***/
-063
-064  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-065return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-069return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-073return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074  cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-078return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-079  }
-080
-081  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-082return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
+063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
+067return 
range.set(cell.getFamilyArray(), 

[39/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/NoTagsByteBufferKeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/NoTagsByteBufferKeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/NoTagsByteBufferKeyValue.html
index d0f10a4..e843f87 100644
--- a/devapidocs/org/apache/hadoop/hbase/NoTagsByteBufferKeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/NoTagsByteBufferKeyValue.html
@@ -246,7 +246,7 @@ extends 
 
 Methods inherited from classorg.apache.hadoop.hbase.ByteBufferKeyValue
-equals,
 getBuffer,
 getFamilyArray,
 getFamilyByteBuffer,
 getFamilyLength,
 getFamilyOffset,
 getFamilyPosition,
 getOffset,
 getQualifierArray,
 getQualifierByteBuffer,
 getQualifierLength,
 getQualifierOffset,
 getQualifierPosition,
 getRowArray,
 getRowByteBuffer,
 getRowLength,
 getRowOffset,
 getRowPosition,
 getSequenceId,
 getTag,
 getTags,
 getTagsByteBuffer,
 getTagsOffset,
 getTagsPosition,
 getTimestamp,
 getTypeByte,
 getValueArray,
 getValueByteBuffer, href="../../../../org/apache/hadoop/hbase/ByteBufferKeyValue.html#getValueLength--">getValueLength,
 > href="../../../../org/apache/hadoop/hbase/ByteBufferKeyValue.html#getValueOffset--">getValueOffset,
 > href="../../../../org/apache/hadoop/hbase/ByteBufferKeyValue.html#getValuePosition--">getValuePosition,
 > href="../../../../org/apache/hadoop/hbase/ByteBufferKeyValue.html#hashCode--">hashCode,
 > href="../../../../org/apache/hadoop/hbase/ByteBufferKeyValue.html#heapSize--">heapSize,
 > href="../../../../org/apache/hadoop/hbase/ByteBufferKeyValue.html#setSequenceId-long-">setSequenceId,
 > href="../../../../org/apache/hadoop/hbase/ByteBufferKeyValue.html#setTimestamp-byte:A-">setTimestamp,
 > href="../../../../org/apache/hadoop/hbase/ByteBufferKeyValue.html#setTimestamp-long-">setTimestamp,
 > href="../../../../org/apache/hadoop/hbase/ByteBufferKeyValue.html#toString--">toString,
 > write, write
+equals,
 getBuffer,
 getFamilyArray,
 getFamilyByteBuffer,
 getFamilyLength,
 getFamilyOffset,
 getFamilyPosition,
 getOffset,
 getQualifierArray,
 getQualifierByteBuffer,
 getQualifierLength,
 getQualifierOffset,
 getQualifierPosition,
 getRowArray,
 getRowByteBuffer,
 getRowLength,
 getRowOffset,
 getRowPosition,
 getSequenceId,
 getTagsByteBuffer,
 getTagsOffset,
 getTagsPosition,
 getTimestamp,
 getTypeByte,
 getValueArray,
 getValueByteBuffer,
 getValueLength,
 getValueOffset, getValuePosition,
 hashCode,
 heapSize,
 setSequenceId,
 setTimestamp,
 setTimestamp,
 toString,
 write,
 write
 
 
 
@@ -267,7 +267,7 @@ extends 
 
 Methods inherited from interfaceorg.apache.hadoop.hbase.RawCell
-checkForTagsLength,
 cloneTags
+checkForTagsLength,
 cloneTags,
 getTag,
 getTags
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/NoTagsKeyValue.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/NoTagsKeyValue.html 
b/devapidocs/org/apache/hadoop/hbase/NoTagsKeyValue.html
index 6bf6c54..91d8081 100644
--- a/devapidocs/org/apache/hadoop/hbase/NoTagsKeyValue.html
+++ b/devapidocs/org/apache/hadoop/hbase/NoTagsKeyValue.html
@@ -239,7 +239,7 @@ extends KeyValue
-checkParameters,
 clone, 
create,
 create,
 createKeyOnly,
 equals,
 getBuffer,
 getDelimiter,
 getDelimiterInReverse,
 getFamilyArray, getFamilyLength,
 getFamilyLength,
 getFamilyOffset,
 getKey, 
getKeyDataStructureSize,
 getKeyLength,
 getKeyOffset,
 getKeyString,
 getKeyValueDataStructureSize,
 getKeyValueDataStructureSize,
 getKeyValueDataStructureSize,
 getLength,
 getOffset,
 getQualifierArray,
 getQualifierLength,
 getQualifierOffset,
 getRowArray,
 getRowLength,
 get
 RowOffset, getSequenceId,
 getTag,
 getTags, 
getTagsArray,
 getTagsOffset,
 getTimestamp,
 getTimestamp,
 getTimestampOffset,
 getTypeByte,
 getValueArray,
 getValueLength, getValueOffset,
 hashCode,
 heapSize,
 humanReadableTimestamp,
 isLatestTimestamp,
 keyToString,
 keyToString,
 oswrite,
 setSequenceId,
 setTimestamp, 
setTimestamp,
 shallowCopy,
 toString,
 toStringMap,
 updateLatestStamp,
 write,
 write,
 writeByteArray
+checkParameters,
 clone, 
create,
 create,
 createKeyOnly,
 equals,
 getBuffer,
 getDelimiter,
 getDelimiterInReverse,
 getFamilyArray, getFamilyLength,
 getFamilyLength,
 getFamilyOffset,
 getKey, 
getKeyDataStructureSize,
 getKeyLength,
 getKeyOffset,
 getKeyString,
 getKeyValueDataStructureSize,
 getKeyValueDataStructureSize,
 getKeyValueDataStructureSize,
 getLength,
 getOffset,
 getQualifierArray,
 getQualifierLength,
 getQualifierOffset,
 getRowArray,
 getRowLength,
 get
 RowOffset, getSequenceId,
 getTagsArray,
 getTagsOffset,
 getTimestamp,
 getTimestamp,
 getTimestampOffset,
 getTypeByte,
 getValueArray,
 getValueLength,
 getValueOffset,
 hashCode, heapSize,
 humanReadableTimestamp,
 isLatestTimestamp,
 keyToString,
 keyToString,
 oswrite,
 setSequenceId,
 

[24/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html
 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html
index cf9843c..d579a94 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.KeyValueStatsCollector.html
@@ -113,7 +113,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class HFilePrettyPrinter.KeyValueStatsCollector
+private static class HFilePrettyPrinter.KeyValueStatsCollector
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -263,7 +263,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 metricsRegistry
-private finalcom.codahale.metrics.MetricRegistry metricsRegistry
+private finalcom.codahale.metrics.MetricRegistry metricsRegistry
 
 
 
@@ -272,7 +272,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 metricsOutput
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/io/ByteArrayOutputStream.html?is-external=true;
 title="class or interface in java.io">ByteArrayOutputStream metricsOutput
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/io/ByteArrayOutputStream.html?is-external=true;
 title="class or interface in java.io">ByteArrayOutputStream metricsOutput
 
 
 
@@ -281,7 +281,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 simpleReporter
-private finalHFilePrettyPrinter.SimpleReporter simpleReporter
+private finalHFilePrettyPrinter.SimpleReporter simpleReporter
 
 
 
@@ -290,7 +290,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 keyLen
-com.codahale.metrics.Histogram keyLen
+com.codahale.metrics.Histogram keyLen
 
 
 
@@ -299,7 +299,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 valLen
-com.codahale.metrics.Histogram valLen
+com.codahale.metrics.Histogram valLen
 
 
 
@@ -308,7 +308,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 rowSizeBytes
-com.codahale.metrics.Histogram rowSizeBytes
+com.codahale.metrics.Histogram rowSizeBytes
 
 
 
@@ -317,7 +317,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 rowSizeCols
-com.codahale.metrics.Histogram rowSizeCols
+com.codahale.metrics.Histogram rowSizeCols
 
 
 
@@ -326,7 +326,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 curRowBytes
-long curRowBytes
+long curRowBytes
 
 
 
@@ -335,7 +335,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 curRowCols
-long curRowCols
+long curRowCols
 
 
 
@@ -344,7 +344,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 biggestRow
-byte[] biggestRow
+byte[] biggestRow
 
 
 
@@ -353,7 +353,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 prevCell
-privateCell prevCell
+privateCell prevCell
 
 
 
@@ -362,7 +362,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 maxRowBytes
-privatelong maxRowBytes
+privatelong maxRowBytes
 
 
 
@@ -371,7 +371,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 curRowKeyLength
-privatelong curRowKeyLength
+privatelong curRowKeyLength
 
 
 
@@ -388,7 +388,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 KeyValueStatsCollector
-privateKeyValueStatsCollector()
+privateKeyValueStatsCollector()
 
 
 
@@ -405,7 +405,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 collect
-publicvoidcollect(Cellcell)
+publicvoidcollect(Cellcell)
 
 
 
@@ -414,7 +414,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 collectRow
-privatevoidcollectRow()
+privatevoidcollectRow()
 
 
 
@@ -423,7 +423,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 finish
-publicvoidfinish()
+publicvoidfinish()
 
 
 
@@ -432,7 +432,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 toString
-publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
+publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
 
 Overrides:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--;
 title="class or interface in java.lang">toStringin 

[27/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/client/example/HttpProxyExample.RequestHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/example/HttpProxyExample.RequestHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/client/example/HttpProxyExample.RequestHandler.html
index 3d37d41..5a14666 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/example/HttpProxyExample.RequestHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/example/HttpProxyExample.RequestHandler.html
@@ -132,7 +132,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static final class HttpProxyExample.RequestHandler
+private static final class HttpProxyExample.RequestHandler
 extends 
org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHandlerorg.apache.hadoop.hbase.shaded.io.netty.handler.codec.http.FullHttpRequest
 
 
@@ -302,7 +302,7 @@ extends 
org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHand
 
 
 conn
-private finalAsyncConnection conn
+private finalAsyncConnection conn
 
 
 
@@ -311,7 +311,7 @@ extends 
org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHand
 
 
 channelGroup
-private 
finalorg.apache.hadoop.hbase.shaded.io.netty.channel.group.ChannelGroup 
channelGroup
+private 
finalorg.apache.hadoop.hbase.shaded.io.netty.channel.group.ChannelGroup 
channelGroup
 
 
 
@@ -328,7 +328,7 @@ extends 
org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHand
 
 
 RequestHandler
-publicRequestHandler(AsyncConnectionconn,
+publicRequestHandler(AsyncConnectionconn,
   
org.apache.hadoop.hbase.shaded.io.netty.channel.group.ChannelGroupchannelGroup)
 
 
@@ -346,7 +346,7 @@ extends 
org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHand
 
 
 channelActive
-publicvoidchannelActive(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContextctx)
+publicvoidchannelActive(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContextctx)
 
 Specified by:
 channelActivein 
interfaceorg.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInboundHandler
@@ -361,7 +361,7 @@ extends 
org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHand
 
 
 channelInactive
-publicvoidchannelInactive(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContextctx)
+publicvoidchannelInactive(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContextctx)
 
 Specified by:
 channelInactivein 
interfaceorg.apache.hadoop.hbase.shaded.io.netty.channel.ChannelInboundHandler
@@ -376,7 +376,7 @@ extends 
org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHand
 
 
 write
-privatevoidwrite(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContextctx,
+privatevoidwrite(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContextctx,

org.apache.hadoop.hbase.shaded.io.netty.handler.codec.http.HttpResponseStatusstatus,
http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">Optionalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringcontent)
 
@@ -387,7 +387,7 @@ extends 
org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHand
 
 
 parse
-privateHttpProxyExample.Paramsparse(org.apache.hadoop.hbase.shaded.io.netty.handler.codec.http.FullHttpRequestreq)
+privateHttpProxyExample.Paramsparse(org.apache.hadoop.hbase.shaded.io.netty.handler.codec.http.FullHttpRequestreq)
 
 
 
@@ -396,7 +396,7 @@ extends 
org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHand
 
 
 get
-privatevoidget(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContextctx,
+privatevoidget(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContextctx,
  
org.apache.hadoop.hbase.shaded.io.netty.handler.codec.http.FullHttpRequestreq)
 
 
@@ -406,7 +406,7 @@ extends 
org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHand
 
 
 put
-privatevoidput(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContextctx,
+privatevoidput(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContextctx,
  
org.apache.hadoop.hbase.shaded.io.netty.handler.codec.http.FullHttpRequestreq)
 
 
@@ -416,7 +416,7 @@ extends 
org.apache.hadoop.hbase.shaded.io.netty.channel.SimpleChannelInboundHand
 
 
 channelRead0
-protectedvoidchannelRead0(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContextctx,
+protectedvoidchannelRead0(org.apache.hadoop.hbase.shaded.io.netty.channel.ChannelHandlerContextctx,
 
org.apache.hadoop.hbase.shaded.io.netty.handler.codec.http.FullHttpRequestreq)
 
 Specified by:
@@ -430,7 +430,7 @@ extends 

[43/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 19938d7..9e2fae0 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
 2007 - 2017 The Apache Software Foundation
 
   File: 3466,
- Errors: 19162,
+ Errors: 19152,
  Warnings: 0,
  Infos: 0
   
@@ -1105,7 +1105,7 @@ under the License.
   0
 
 
-  24
+  23
 
   
   
@@ -5459,7 +5459,7 @@ under the License.
   0
 
 
-  12
+  11
 
   
   
@@ -21265,7 +21265,7 @@ under the License.
   0
 
 
-  2
+  1
 
   
   
@@ -23589,7 +23589,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -24499,7 +24499,7 @@ under the License.
   0
 
 
-  15
+  14
 
   
   
@@ -32423,7 +32423,7 @@ under the License.
   0
 
 
-  20
+  18
 
   
   
@@ -33655,7 +33655,7 @@ under the License.
   0
 
 
-  39
+  38
 
   
   
@@ -46129,7 +46129,7 @@ under the License.
   0
 
 
-  2
+  0
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/coc.html
--
diff --git a/coc.html b/coc.html
index 2394f04..18b4ceb 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Code of Conduct Policy
@@ -380,7 +380,7 @@ email to mailto:priv...@hbase.apache.org;>the priv
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-26
+  Last Published: 
2017-12-27
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 626f3f8..b2f9a42 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -679,7 +679,7 @@ Now your HBase server is running, start 
coding and build that next
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-26
+  Last Published: 
2017-12-27
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index cfd8a6d..a689320 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependencies
 
@@ -445,7 +445,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-26
+  Last Published: 
2017-12-27
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 85b9dfa..6026938 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Reactor Dependency Convergence
 
@@ -1082,7 +1082,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-26
+  Last Published: 
2017-12-27
 
 
 


hbase-site git commit: INFRA-10751 Empty commit

2017-12-27 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site d2b28a1a2 -> 7254d5f48


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/7254d5f4
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/7254d5f4
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/7254d5f4

Branch: refs/heads/asf-site
Commit: 7254d5f48a21c9725742de5fa7b26c9747324574
Parents: d2b28a1
Author: jenkins 
Authored: Wed Dec 27 15:19:39 2017 +
Committer: jenkins 
Committed: Wed Dec 27 15:19:39 2017 +

--

--




[48/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/apidocs/src-html/org/apache/hadoop/hbase/ServerMetrics.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/ServerMetrics.html 
b/apidocs/src-html/org/apache/hadoop/hbase/ServerMetrics.html
index 0e66fe5..1001ecc 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/ServerMetrics.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/ServerMetrics.html
@@ -28,74 +28,75 @@
 020import 
edu.umd.cs.findbugs.annotations.Nullable;
 021import java.util.List;
 022import java.util.Map;
-023import 
org.apache.hadoop.hbase.replication.ReplicationLoadSink;
-024import 
org.apache.hadoop.hbase.replication.ReplicationLoadSource;
-025import 
org.apache.yetus.audience.InterfaceAudience;
-026
-027/**
-028 * This class is used for exporting 
current state of load on a RegionServer.
-029 */
-030@InterfaceAudience.Public
-031public interface ServerMetrics {
-032
-033  ServerName getServerName();
-034  /**
-035   * @return the number of requests per 
second.
-036   */
-037  long getRequestCountPerSecond();
-038
-039  /**
-040   * @return total Number of requests 
from the start of the region server.
-041   */
-042  long getRequestCount();
-043
-044  /**
-045   * @return the amount of used heap
-046   */
-047  Size getUsedHeapSize();
-048
-049  /**
-050   * @return the maximum allowable size 
of the heap
-051   */
-052  Size getMaxHeapSize();
-053
-054  int getInfoServerPort();
-055
-056  /**
-057   * Call directly from client such as 
hbase shell
-058   * @return the list of 
ReplicationLoadSource
-059   */
-060  ListReplicationLoadSource 
getReplicationLoadSourceList();
-061
-062  /**
-063   * Call directly from client such as 
hbase shell
-064   * @return ReplicationLoadSink
-065   */
-066  @Nullable
-067  ReplicationLoadSink 
getReplicationLoadSink();
-068
-069  /**
-070   * @return region load metrics
-071   */
-072  Mapbyte[], RegionMetrics 
getRegionMetrics();
-073
-074  /**
-075   * Return the RegionServer-level and 
Region-level coprocessors
-076   * @return string list of loaded 
RegionServer-level and Region-level coprocessors
-077   */
-078  ListString 
getCoprocessorNames();
-079
-080  /**
-081   * @return the timestamp (server side) 
of generating this metrics
-082   */
-083  long getReportTimestamp();
-084
-085  /**
-086   * @return the last timestamp (server 
side) of generating this metrics
-087   */
-088  long getLastReportTimestamp();
-089
-090}
+023import java.util.Set;
+024import 
org.apache.hadoop.hbase.replication.ReplicationLoadSink;
+025import 
org.apache.hadoop.hbase.replication.ReplicationLoadSource;
+026import 
org.apache.yetus.audience.InterfaceAudience;
+027
+028/**
+029 * This class is used for exporting 
current state of load on a RegionServer.
+030 */
+031@InterfaceAudience.Public
+032public interface ServerMetrics {
+033
+034  ServerName getServerName();
+035  /**
+036   * @return the number of requests per 
second.
+037   */
+038  long getRequestCountPerSecond();
+039
+040  /**
+041   * @return total Number of requests 
from the start of the region server.
+042   */
+043  long getRequestCount();
+044
+045  /**
+046   * @return the amount of used heap
+047   */
+048  Size getUsedHeapSize();
+049
+050  /**
+051   * @return the maximum allowable size 
of the heap
+052   */
+053  Size getMaxHeapSize();
+054
+055  int getInfoServerPort();
+056
+057  /**
+058   * Call directly from client such as 
hbase shell
+059   * @return the list of 
ReplicationLoadSource
+060   */
+061  ListReplicationLoadSource 
getReplicationLoadSourceList();
+062
+063  /**
+064   * Call directly from client such as 
hbase shell
+065   * @return ReplicationLoadSink
+066   */
+067  @Nullable
+068  ReplicationLoadSink 
getReplicationLoadSink();
+069
+070  /**
+071   * @return region load metrics
+072   */
+073  Mapbyte[], RegionMetrics 
getRegionMetrics();
+074
+075  /**
+076   * Return the RegionServer-level and 
Region-level coprocessors
+077   * @return string set of loaded 
RegionServer-level and Region-level coprocessors
+078   */
+079  SetString 
getCoprocessorNames();
+080
+081  /**
+082   * @return the timestamp (server side) 
of generating this metrics
+083   */
+084  long getReportTimestamp();
+085
+086  /**
+087   * @return the last timestamp (server 
side) of generating this metrics
+088   */
+089  long getLastReportTimestamp();
+090
+091}
 
 
 



[17/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
index f8eace7..66b6656 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KeyOnlyKeyValue.html
@@ -27,2569 +27,2540 @@
 019 */
 020package org.apache.hadoop.hbase;
 021
-022import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-023import static 
org.apache.hadoop.hbase.util.Bytes.len;
-024
-025import java.io.DataInput;
-026import java.io.DataOutput;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.nio.ByteBuffer;
-030import java.util.ArrayList;
-031import java.util.Arrays;
-032import java.util.HashMap;
-033import java.util.Iterator;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Optional;
-037
-038import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-039import 
org.apache.hadoop.hbase.util.Bytes;
-040import 
org.apache.hadoop.hbase.util.ClassSize;
-041import 
org.apache.hadoop.io.RawComparator;
-042import 
org.apache.yetus.audience.InterfaceAudience;
-043import org.slf4j.Logger;
-044import org.slf4j.LoggerFactory;
-045
-046import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-047
-048/**
-049 * An HBase Key/Value. This is the 
fundamental HBase Type.
+022import static 
org.apache.hadoop.hbase.util.Bytes.len;
+023
+024import java.io.DataInput;
+025import java.io.DataOutput;
+026import java.io.IOException;
+027import java.io.OutputStream;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Arrays;
+031import java.util.HashMap;
+032import java.util.Iterator;
+033import java.util.List;
+034import java.util.Map;
+035import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+036import 
org.apache.hadoop.hbase.util.Bytes;
+037import 
org.apache.hadoop.hbase.util.ClassSize;
+038import 
org.apache.hadoop.io.RawComparator;
+039import 
org.apache.yetus.audience.InterfaceAudience;
+040import org.slf4j.Logger;
+041import org.slf4j.LoggerFactory;
+042
+043import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+044
+045/**
+046 * An HBase Key/Value. This is the 
fundamental HBase Type.
+047 * p
+048 * HBase applications and users should 
use the Cell interface and avoid directly using KeyValue and
+049 * member functions not defined in 
Cell.
 050 * p
-051 * HBase applications and users should 
use the Cell interface and avoid directly using KeyValue and
-052 * member functions not defined in 
Cell.
-053 * p
-054 * If being used client-side, the primary 
methods to access individual fields are
-055 * {@link #getRowArray()}, {@link 
#getFamilyArray()}, {@link #getQualifierArray()},
-056 * {@link #getTimestamp()}, and {@link 
#getValueArray()}. These methods allocate new byte arrays
-057 * and return copies. Avoid their use 
server-side.
-058 * p
-059 * Instances of this class are immutable. 
They do not implement Comparable but Comparators are
-060 * provided. Comparators change with 
context, whether user table or a catalog table comparison. Its
-061 * critical you use the appropriate 
comparator. There are Comparators for normal HFiles, Meta's
-062 * Hfiles, and bloom filter keys.
-063 * p
-064 * KeyValue wraps a byte array and takes 
offsets and lengths into passed array at where to start
-065 * interpreting the content as KeyValue. 
The KeyValue format inside a byte array is:
-066 * codelt;keylengthgt; 
lt;valuelengthgt; lt;keygt; 
lt;valuegt;/code Key is further
-067 * decomposed as: 
codelt;rowlengthgt; lt;rowgt; 
lt;columnfamilylengthgt;
-068 * lt;columnfamilygt; 
lt;columnqualifiergt;
-069 * lt;timestampgt; 
lt;keytypegt;/code The coderowlength/code 
maximum is
-070 * 
codeShort.MAX_SIZE/code, column family length maximum is 
codeByte.MAX_SIZE/code, and
-071 * column qualifier + key length must be 
lt; codeInteger.MAX_SIZE/code. The column does not
-072 * contain the family/qualifier 
delimiter, {@link #COLUMN_FAMILY_DELIMITER}br
-073 * KeyValue can optionally contain Tags. 
When it contains tags, it is added in the byte array after
-074 * the value part. The format for this 
part is: 
codelt;tagslengthgt;lt;tagsbytesgt;/code.
-075 * codetagslength/code 
maximum is codeShort.MAX_SIZE/code. The 
codetagsbytes/code
-076 * contain one or more tags where as each 
tag is of the form
-077 * 
codelt;taglengthgt;lt;tagtypegt;lt;tagbytesgt;/code.
 codetagtype/code is one byte
-078 * and codetaglength/code 
maximum is codeShort.MAX_SIZE/code and it includes 1 byte 
type
-079 * length and actual tag bytes length.
-080 */
-081@InterfaceAudience.Private
-082public class KeyValue implements 
ExtendedCell {

[14/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.Type.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.Type.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.Type.html
index f8eace7..66b6656 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.Type.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.Type.html
@@ -27,2569 +27,2540 @@
 019 */
 020package org.apache.hadoop.hbase;
 021
-022import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-023import static 
org.apache.hadoop.hbase.util.Bytes.len;
-024
-025import java.io.DataInput;
-026import java.io.DataOutput;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.nio.ByteBuffer;
-030import java.util.ArrayList;
-031import java.util.Arrays;
-032import java.util.HashMap;
-033import java.util.Iterator;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Optional;
-037
-038import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-039import 
org.apache.hadoop.hbase.util.Bytes;
-040import 
org.apache.hadoop.hbase.util.ClassSize;
-041import 
org.apache.hadoop.io.RawComparator;
-042import 
org.apache.yetus.audience.InterfaceAudience;
-043import org.slf4j.Logger;
-044import org.slf4j.LoggerFactory;
-045
-046import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-047
-048/**
-049 * An HBase Key/Value. This is the 
fundamental HBase Type.
+022import static 
org.apache.hadoop.hbase.util.Bytes.len;
+023
+024import java.io.DataInput;
+025import java.io.DataOutput;
+026import java.io.IOException;
+027import java.io.OutputStream;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Arrays;
+031import java.util.HashMap;
+032import java.util.Iterator;
+033import java.util.List;
+034import java.util.Map;
+035import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+036import 
org.apache.hadoop.hbase.util.Bytes;
+037import 
org.apache.hadoop.hbase.util.ClassSize;
+038import 
org.apache.hadoop.io.RawComparator;
+039import 
org.apache.yetus.audience.InterfaceAudience;
+040import org.slf4j.Logger;
+041import org.slf4j.LoggerFactory;
+042
+043import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+044
+045/**
+046 * An HBase Key/Value. This is the 
fundamental HBase Type.
+047 * p
+048 * HBase applications and users should 
use the Cell interface and avoid directly using KeyValue and
+049 * member functions not defined in 
Cell.
 050 * p
-051 * HBase applications and users should 
use the Cell interface and avoid directly using KeyValue and
-052 * member functions not defined in 
Cell.
-053 * p
-054 * If being used client-side, the primary 
methods to access individual fields are
-055 * {@link #getRowArray()}, {@link 
#getFamilyArray()}, {@link #getQualifierArray()},
-056 * {@link #getTimestamp()}, and {@link 
#getValueArray()}. These methods allocate new byte arrays
-057 * and return copies. Avoid their use 
server-side.
-058 * p
-059 * Instances of this class are immutable. 
They do not implement Comparable but Comparators are
-060 * provided. Comparators change with 
context, whether user table or a catalog table comparison. Its
-061 * critical you use the appropriate 
comparator. There are Comparators for normal HFiles, Meta's
-062 * Hfiles, and bloom filter keys.
-063 * p
-064 * KeyValue wraps a byte array and takes 
offsets and lengths into passed array at where to start
-065 * interpreting the content as KeyValue. 
The KeyValue format inside a byte array is:
-066 * codelt;keylengthgt; 
lt;valuelengthgt; lt;keygt; 
lt;valuegt;/code Key is further
-067 * decomposed as: 
codelt;rowlengthgt; lt;rowgt; 
lt;columnfamilylengthgt;
-068 * lt;columnfamilygt; 
lt;columnqualifiergt;
-069 * lt;timestampgt; 
lt;keytypegt;/code The coderowlength/code 
maximum is
-070 * 
codeShort.MAX_SIZE/code, column family length maximum is 
codeByte.MAX_SIZE/code, and
-071 * column qualifier + key length must be 
lt; codeInteger.MAX_SIZE/code. The column does not
-072 * contain the family/qualifier 
delimiter, {@link #COLUMN_FAMILY_DELIMITER}br
-073 * KeyValue can optionally contain Tags. 
When it contains tags, it is added in the byte array after
-074 * the value part. The format for this 
part is: 
codelt;tagslengthgt;lt;tagsbytesgt;/code.
-075 * codetagslength/code 
maximum is codeShort.MAX_SIZE/code. The 
codetagsbytes/code
-076 * contain one or more tags where as each 
tag is of the form
-077 * 
codelt;taglengthgt;lt;tagtypegt;lt;tagbytesgt;/code.
 codetagtype/code is one byte
-078 * and codetaglength/code 
maximum is codeShort.MAX_SIZE/code and it includes 1 byte 
type
-079 * length and actual tag bytes length.
-080 */
-081@InterfaceAudience.Private
-082public class KeyValue implements 
ExtendedCell {
-083  private static final 
ArrayListTag 

[04/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowDeleteFamilyCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowDeleteFamilyCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowDeleteFamilyCell.html
index 3400507..2baa140 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowDeleteFamilyCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowDeleteFamilyCell.html
@@ -28,3034 +28,2926 @@
 020import static 
org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import 
com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import 
org.apache.hadoop.hbase.KeyValue.Type;
-037import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-040import 
org.apache.hadoop.hbase.io.util.Dictionary;
-041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-042import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import 
org.apache.hadoop.hbase.util.ByteRange;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045import 
org.apache.hadoop.hbase.util.ClassSize;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
-051 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this 
class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import 
org.apache.hadoop.hbase.KeyValue.Type;
+034import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import 
org.apache.hadoop.hbase.io.HeapSize;
+036import 
org.apache.hadoop.hbase.io.TagCompressionContext;
+037import 
org.apache.hadoop.hbase.io.util.Dictionary;
+038import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+039import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import 
org.apache.hadoop.hbase.util.ByteRange;
+041import 
org.apache.hadoop.hbase.util.Bytes;
+042import 
org.apache.hadoop.hbase.util.ClassSize;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
+049 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this 
class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange 
***/
 061
-062  /*** ByteRange 
***/
-063
-064  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-065return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-069return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-073return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074  cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-078return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-079  }
-080
-081  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-082return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
+063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
+067

[08/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColByteBufferCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColByteBufferCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColByteBufferCell.html
index 3400507..2baa140 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColByteBufferCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.FirstOnRowColByteBufferCell.html
@@ -28,3034 +28,2926 @@
 020import static 
org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import 
com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import 
org.apache.hadoop.hbase.KeyValue.Type;
-037import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-040import 
org.apache.hadoop.hbase.io.util.Dictionary;
-041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-042import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import 
org.apache.hadoop.hbase.util.ByteRange;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045import 
org.apache.hadoop.hbase.util.ClassSize;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
-051 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this 
class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import 
org.apache.hadoop.hbase.KeyValue.Type;
+034import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import 
org.apache.hadoop.hbase.io.HeapSize;
+036import 
org.apache.hadoop.hbase.io.TagCompressionContext;
+037import 
org.apache.hadoop.hbase.io.util.Dictionary;
+038import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+039import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import 
org.apache.hadoop.hbase.util.ByteRange;
+041import 
org.apache.hadoop.hbase.util.Bytes;
+042import 
org.apache.hadoop.hbase.util.ClassSize;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
+049 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this 
class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange 
***/
 061
-062  /*** ByteRange 
***/
-063
-064  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-065return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-069return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-073return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074  cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-078return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-079  }
-080
-081  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-082return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
+063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
+067 

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
index 3400507..2baa140 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyByteBufferCell.html
@@ -28,3034 +28,2926 @@
 020import static 
org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import 
com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import 
org.apache.hadoop.hbase.KeyValue.Type;
-037import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-040import 
org.apache.hadoop.hbase.io.util.Dictionary;
-041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-042import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import 
org.apache.hadoop.hbase.util.ByteRange;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045import 
org.apache.hadoop.hbase.util.ClassSize;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
-051 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this 
class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import 
org.apache.hadoop.hbase.KeyValue.Type;
+034import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import 
org.apache.hadoop.hbase.io.HeapSize;
+036import 
org.apache.hadoop.hbase.io.TagCompressionContext;
+037import 
org.apache.hadoop.hbase.io.util.Dictionary;
+038import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+039import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import 
org.apache.hadoop.hbase.util.ByteRange;
+041import 
org.apache.hadoop.hbase.util.Bytes;
+042import 
org.apache.hadoop.hbase.util.ClassSize;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
+049 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this 
class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange 
***/
 061
-062  /*** ByteRange 
***/
-063
-064  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-065return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-069return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-073return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074  cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-078return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-079  }
-080
-081  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-082return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
+063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
+067return 

[11/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyCell.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyCell.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyCell.html
index 3400507..2baa140 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyCell.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/PrivateCellUtil.EmptyCell.html
@@ -28,3034 +28,2926 @@
 020import static 
org.apache.hadoop.hbase.HConstants.EMPTY_BYTE_ARRAY;
 021import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
 022
-023import 
com.google.common.annotations.VisibleForTesting;
-024
-025import java.io.DataOutput;
-026import java.io.DataOutputStream;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.math.BigDecimal;
-030import java.nio.ByteBuffer;
-031import java.util.ArrayList;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Optional;
-035
-036import 
org.apache.hadoop.hbase.KeyValue.Type;
-037import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.io.TagCompressionContext;
-040import 
org.apache.hadoop.hbase.io.util.Dictionary;
-041import 
org.apache.hadoop.hbase.io.util.StreamUtils;
-042import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-043import 
org.apache.hadoop.hbase.util.ByteRange;
-044import 
org.apache.hadoop.hbase.util.Bytes;
-045import 
org.apache.hadoop.hbase.util.ClassSize;
-046import 
org.apache.yetus.audience.InterfaceAudience;
-047
-048
-049/**
-050 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
-051 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
-052 */
-053@InterfaceAudience.Private
-054public final class PrivateCellUtil {
-055
-056  /**
-057   * Private constructor to keep this 
class from being instantiated.
-058   */
-059  private PrivateCellUtil() {
-060  }
+023import java.io.DataOutput;
+024import java.io.DataOutputStream;
+025import java.io.IOException;
+026import java.io.OutputStream;
+027import java.math.BigDecimal;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Iterator;
+031import java.util.List;
+032import java.util.Optional;
+033import 
org.apache.hadoop.hbase.KeyValue.Type;
+034import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
+035import 
org.apache.hadoop.hbase.io.HeapSize;
+036import 
org.apache.hadoop.hbase.io.TagCompressionContext;
+037import 
org.apache.hadoop.hbase.io.util.Dictionary;
+038import 
org.apache.hadoop.hbase.io.util.StreamUtils;
+039import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+040import 
org.apache.hadoop.hbase.util.ByteRange;
+041import 
org.apache.hadoop.hbase.util.Bytes;
+042import 
org.apache.hadoop.hbase.util.ClassSize;
+043import 
org.apache.yetus.audience.InterfaceAudience;
+044
+045import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+046
+047/**
+048 * Utility methods helpful slinging 
{@link Cell} instances. It has more powerful and
+049 * rich set of APIs than those in {@link 
CellUtil} for internal usage.
+050 */
+051@InterfaceAudience.Private
+052public final class PrivateCellUtil {
+053
+054  /**
+055   * Private constructor to keep this 
class from being instantiated.
+056   */
+057  private PrivateCellUtil() {
+058  }
+059
+060  /*** ByteRange 
***/
 061
-062  /*** ByteRange 
***/
-063
-064  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
-065return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
-066  }
-067
-068  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
-069return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 
cell.getFamilyLength());
-070  }
-071
-072  public static ByteRange 
fillQualifierRange(Cell cell, ByteRange range) {
-073return 
range.set(cell.getQualifierArray(), cell.getQualifierOffset(),
-074  cell.getQualifierLength());
-075  }
-076
-077  public static ByteRange 
fillValueRange(Cell cell, ByteRange range) {
-078return 
range.set(cell.getValueArray(), cell.getValueOffset(), 
cell.getValueLength());
-079  }
-080
-081  public static ByteRange 
fillTagRange(Cell cell, ByteRange range) {
-082return range.set(cell.getTagsArray(), 
cell.getTagsOffset(), cell.getTagsLength());
-083  }
+062  public static ByteRange 
fillRowRange(Cell cell, ByteRange range) {
+063return range.set(cell.getRowArray(), 
cell.getRowOffset(), cell.getRowLength());
+064  }
+065
+066  public static ByteRange 
fillFamilyRange(Cell cell, ByteRange range) {
+067return 
range.set(cell.getFamilyArray(), cell.getFamilyOffset(), 

[18/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KVComparator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KVComparator.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KVComparator.html
index f8eace7..66b6656 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KVComparator.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/KeyValue.KVComparator.html
@@ -27,2569 +27,2540 @@
 019 */
 020package org.apache.hadoop.hbase;
 021
-022import static 
org.apache.hadoop.hbase.Tag.TAG_LENGTH_SIZE;
-023import static 
org.apache.hadoop.hbase.util.Bytes.len;
-024
-025import java.io.DataInput;
-026import java.io.DataOutput;
-027import java.io.IOException;
-028import java.io.OutputStream;
-029import java.nio.ByteBuffer;
-030import java.util.ArrayList;
-031import java.util.Arrays;
-032import java.util.HashMap;
-033import java.util.Iterator;
-034import java.util.List;
-035import java.util.Map;
-036import java.util.Optional;
-037
-038import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
-039import 
org.apache.hadoop.hbase.util.Bytes;
-040import 
org.apache.hadoop.hbase.util.ClassSize;
-041import 
org.apache.hadoop.io.RawComparator;
-042import 
org.apache.yetus.audience.InterfaceAudience;
-043import org.slf4j.Logger;
-044import org.slf4j.LoggerFactory;
-045
-046import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-047
-048/**
-049 * An HBase Key/Value. This is the 
fundamental HBase Type.
+022import static 
org.apache.hadoop.hbase.util.Bytes.len;
+023
+024import java.io.DataInput;
+025import java.io.DataOutput;
+026import java.io.IOException;
+027import java.io.OutputStream;
+028import java.nio.ByteBuffer;
+029import java.util.ArrayList;
+030import java.util.Arrays;
+031import java.util.HashMap;
+032import java.util.Iterator;
+033import java.util.List;
+034import java.util.Map;
+035import 
org.apache.hadoop.hbase.util.ByteBufferUtils;
+036import 
org.apache.hadoop.hbase.util.Bytes;
+037import 
org.apache.hadoop.hbase.util.ClassSize;
+038import 
org.apache.hadoop.io.RawComparator;
+039import 
org.apache.yetus.audience.InterfaceAudience;
+040import org.slf4j.Logger;
+041import org.slf4j.LoggerFactory;
+042
+043import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+044
+045/**
+046 * An HBase Key/Value. This is the 
fundamental HBase Type.
+047 * p
+048 * HBase applications and users should 
use the Cell interface and avoid directly using KeyValue and
+049 * member functions not defined in 
Cell.
 050 * p
-051 * HBase applications and users should 
use the Cell interface and avoid directly using KeyValue and
-052 * member functions not defined in 
Cell.
-053 * p
-054 * If being used client-side, the primary 
methods to access individual fields are
-055 * {@link #getRowArray()}, {@link 
#getFamilyArray()}, {@link #getQualifierArray()},
-056 * {@link #getTimestamp()}, and {@link 
#getValueArray()}. These methods allocate new byte arrays
-057 * and return copies. Avoid their use 
server-side.
-058 * p
-059 * Instances of this class are immutable. 
They do not implement Comparable but Comparators are
-060 * provided. Comparators change with 
context, whether user table or a catalog table comparison. Its
-061 * critical you use the appropriate 
comparator. There are Comparators for normal HFiles, Meta's
-062 * Hfiles, and bloom filter keys.
-063 * p
-064 * KeyValue wraps a byte array and takes 
offsets and lengths into passed array at where to start
-065 * interpreting the content as KeyValue. 
The KeyValue format inside a byte array is:
-066 * codelt;keylengthgt; 
lt;valuelengthgt; lt;keygt; 
lt;valuegt;/code Key is further
-067 * decomposed as: 
codelt;rowlengthgt; lt;rowgt; 
lt;columnfamilylengthgt;
-068 * lt;columnfamilygt; 
lt;columnqualifiergt;
-069 * lt;timestampgt; 
lt;keytypegt;/code The coderowlength/code 
maximum is
-070 * 
codeShort.MAX_SIZE/code, column family length maximum is 
codeByte.MAX_SIZE/code, and
-071 * column qualifier + key length must be 
lt; codeInteger.MAX_SIZE/code. The column does not
-072 * contain the family/qualifier 
delimiter, {@link #COLUMN_FAMILY_DELIMITER}br
-073 * KeyValue can optionally contain Tags. 
When it contains tags, it is added in the byte array after
-074 * the value part. The format for this 
part is: 
codelt;tagslengthgt;lt;tagsbytesgt;/code.
-075 * codetagslength/code 
maximum is codeShort.MAX_SIZE/code. The 
codetagsbytes/code
-076 * contain one or more tags where as each 
tag is of the form
-077 * 
codelt;taglengthgt;lt;tagtypegt;lt;tagbytesgt;/code.
 codetagtype/code is one byte
-078 * and codetaglength/code 
maximum is codeShort.MAX_SIZE/code and it includes 1 byte 
type
-079 * length and actual tag bytes length.
-080 */
-081@InterfaceAudience.Private
-082public class KeyValue implements 
ExtendedCell {
-083  private 

[44/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 2ecc07f..e9373c7 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -289,7 +289,7 @@
 3466
 0
 0
-19162
+19152
 
 Files
 
@@ -807,7 +807,7 @@
 org/apache/hadoop/hbase/TestHColumnDescriptor.java
 0
 0
-2
+1
 
 org/apache/hadoop/hbase/TestHColumnDescriptorDefaultVersions.java
 0
@@ -1717,7 +1717,7 @@
 org/apache/hadoop/hbase/client/Mutation.java
 0
 0
-24
+23
 
 org/apache/hadoop/hbase/client/NoOpRetryableCallerInterceptor.java
 0
@@ -2129,7835 +2129,7825 @@
 0
 2
 
-org/apache/hadoop/hbase/client/TestDelayingRunner.java
-0
-0
-1
-
 org/apache/hadoop/hbase/client/TestDropTimeoutRequest.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestEnableTable.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/TestFastFail.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/client/TestFromClientSide.java
 0
 0
 156
-
+
 org/apache/hadoop/hbase/client/TestFromClientSide3.java
 0
 0
 24
-
+
 org/apache/hadoop/hbase/client/TestFromClientSideNoCodec.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestFromClientSideScanExcpetion.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestFromClientSideWithCoprocessor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestGet.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/TestHBaseAdminNoCluster.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/client/TestHCM.java
 0
 0
 18
-
+
 org/apache/hadoop/hbase/client/TestHTableMultiplexer.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/client/TestHTableMultiplexerFlushCache.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/TestHTableMultiplexerViaMocks.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestImmutableHColumnDescriptor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestImmutableHRegionInfo.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestImmutableHTableDescriptor.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestIncrementsFromClientSide.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestIntraRowPagination.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestLeaseRenewal.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/TestLimitedScanWithFilter.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestMetaCache.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/client/TestMetaWithReplicas.java
 0
 0
 10
-
+
 org/apache/hadoop/hbase/client/TestMetricsConnection.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/client/TestMobCloneSnapshotFromClient.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestMobSnapshotCloneIndependence.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestMultiParallel.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
 0
 0
 32
-
+
 org/apache/hadoop/hbase/client/TestOperation.java
 0
 0
-12
-
+11
+
 org/apache/hadoop/hbase/client/TestProcedureFuture.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestPutWithDelete.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/TestRawAsyncTableLimitedScanWithFilter.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestRawAsyncTableScan.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestReplicaWithCluster.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/TestReplicasClient.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClient.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestResult.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/client/TestResultFromCoprocessor.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestResultSizeEstimation.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestScan.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/client/TestScanWithoutFetchingData.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestScannerTimeout.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
 0
 0
 20
-
+
 org/apache/hadoop/hbase/client/TestScannersFromClientSide2.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestServerBusyException.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestShortCircuitConnection.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestSizeFailures.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestSmallReversedScanner.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestSnapshotFromAdmin.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/client/TestSnapshotFromClient.java
 0
 0
 13
-
+
 org/apache/hadoop/hbase/client/TestSnapshotMetadata.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestSnapshotWithAcl.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
 0
 0
 5
-
+
 

[36/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.TagRewriteByteBufferCell.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.TagRewriteByteBufferCell.html
 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.TagRewriteByteBufferCell.html
index 6aaf50a..a8c2ef3 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.TagRewriteByteBufferCell.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/PrivateCellUtil.TagRewriteByteBufferCell.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -126,7 +126,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class PrivateCellUtil.TagRewriteByteBufferCell
+static class PrivateCellUtil.TagRewriteByteBufferCell
 extends ByteBufferCell
 implements ExtendedCell
 
@@ -305,101 +305,89 @@ implements getSerializedSize(booleanwithTags)
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html?is-external=true;
 title="class or interface in java.util">OptionalTag
-getTag(bytetype)
-Returns the specific tag of the given type
-
-
-
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTag
-getTags()
-Creates a list of tags in the current cell
-
-
-
 byte[]
 getTagsArray()
 Contiguous raw bytes representing tags that may start at 
any index in the containing array.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
 getTagsByteBuffer()
 
-
+
 int
 getTagsLength()
 HBase internally uses 2 bytes to store tags length in 
Cell.
 
 
-
+
 int
 getTagsOffset()
 
-
+
 int
 getTagsPosition()
 
-
+
 long
 getTimestamp()
 
-
+
 byte
 getTypeByte()
 
-
+
 byte[]
 getValueArray()
 Contiguous raw bytes that may start at any index in the 
containing array.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
 getValueByteBuffer()
 
-
+
 int
 getValueLength()
 
-
+
 int
 getValueOffset()
 
-
+
 int
 getValuePosition()
 
-
+
 long
 heapSize()
 
-
+
 void
 setSequenceId(longseqId)
 Sets with the given seqId.
 
 
-
+
 void
 setTimestamp(byte[]ts)
 Sets with the given timestamp.
 
 
-
+
 void
 setTimestamp(longts)
 Sets with the given timestamp.
 
 
-
+
 void
 write(http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBufferbuf,
  intoffset)
 Write this Cell into the given buf's offset in a KeyValue format.
 
 
-
+
 int
 write(http://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html?is-external=true;
 title="class or interface in java.io">OutputStreamout,
  booleanwithTags)
@@ -426,7 +414,7 @@ implements 
 
 Methods inherited from interfaceorg.apache.hadoop.hbase.RawCell
-checkForTagsLength,
 cloneTags
+checkForTagsLength,
 cloneTags,
 getTag,
 getTags
 
 
 
@@ -448,7 +436,7 @@ implements 
 
 cell
-protectedByteBufferCell cell
+protectedByteBufferCell cell
 
 
 
@@ -457,7 +445,7 @@ implements 
 
 tags
-protectedbyte[] tags
+protectedbyte[] tags
 
 
 
@@ -466,7 +454,7 @@ implements 
 
 HEAP_SIZE_OVERHEAD
-private static finalint HEAP_SIZE_OVERHEAD
+private static finalint HEAP_SIZE_OVERHEAD
 
 
 
@@ -483,7 +471,7 @@ implements 
 
 TagRewriteByteBufferCell
-publicTagRewriteByteBufferCell(ByteBufferCellcell,
+publicTagRewriteByteBufferCell(ByteBufferCellcell,
 byte[]tags)
 
 Parameters:
@@ -506,7 +494,7 @@ implements 
 
 getRowArray
-publicbyte[]getRowArray()
+publicbyte[]getRowArray()
 Description copied from 
interface:Cell
 Contiguous raw bytes that may start at any index in the 
containing array. Max length is
  Short.MAX_VALUE which is 32,767 bytes.
@@ -524,7 +512,7 @@ implements 
 
 getRowOffset
-publicintgetRowOffset()
+publicintgetRowOffset()
 
 Specified by:
 getRowOffsetin
 interfaceCell
@@ -539,7 +527,7 @@ implements 
 
 getRowLength
-publicshortgetRowLength()
+publicshortgetRowLength()
 
 Specified by:
 

[46/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
index d0a626e..06b3e36 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
@@ -35,70 +35,70 @@
 027import java.net.InetSocketAddress;
 028import java.net.URLDecoder;
 029import java.net.URLEncoder;
-030import 
java.nio.charset.StandardCharsets;
-031import java.util.ArrayList;
-032import java.util.Arrays;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.Set;
-036import java.util.TreeMap;
-037import java.util.TreeSet;
-038import java.util.UUID;
-039import java.util.function.Function;
-040import java.util.stream.Collectors;
-041
-042import 
org.apache.commons.lang3.StringUtils;
-043import 
org.apache.hadoop.conf.Configuration;
-044import org.apache.hadoop.fs.FileSystem;
-045import org.apache.hadoop.fs.Path;
-046import org.apache.hadoop.hbase.Cell;
-047import 
org.apache.hadoop.hbase.CellComparator;
-048import 
org.apache.hadoop.hbase.CellUtil;
-049import 
org.apache.hadoop.hbase.HConstants;
-050import 
org.apache.hadoop.hbase.HRegionLocation;
-051import 
org.apache.hadoop.hbase.HTableDescriptor;
+030import java.util.ArrayList;
+031import java.util.Arrays;
+032import java.util.List;
+033import java.util.Map;
+034import java.util.Set;
+035import java.util.TreeMap;
+036import java.util.TreeSet;
+037import java.util.UUID;
+038import java.util.function.Function;
+039import java.util.stream.Collectors;
+040
+041import 
org.apache.commons.lang3.StringUtils;
+042import 
org.apache.hadoop.conf.Configuration;
+043import org.apache.hadoop.fs.FileSystem;
+044import org.apache.hadoop.fs.Path;
+045import org.apache.hadoop.hbase.Cell;
+046import 
org.apache.hadoop.hbase.CellComparator;
+047import 
org.apache.hadoop.hbase.CellUtil;
+048import 
org.apache.hadoop.hbase.HConstants;
+049import 
org.apache.hadoop.hbase.HRegionLocation;
+050import 
org.apache.hadoop.hbase.HTableDescriptor;
+051import 
org.apache.hadoop.hbase.KeyValue;
 052import 
org.apache.hadoop.hbase.PrivateCellUtil;
-053import 
org.apache.hadoop.hbase.KeyValue;
-054import 
org.apache.hadoop.hbase.TableName;
-055import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-056import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-057import 
org.apache.hadoop.hbase.client.Connection;
-058import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-059import 
org.apache.hadoop.hbase.client.Put;
-060import 
org.apache.hadoop.hbase.client.RegionLocator;
-061import 
org.apache.hadoop.hbase.client.Table;
-062import 
org.apache.hadoop.hbase.client.TableDescriptor;
-063import 
org.apache.hadoop.hbase.fs.HFileSystem;
-064import 
org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-065import 
org.apache.hadoop.hbase.io.compress.Compression;
-066import 
org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
-067import 
org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-068import 
org.apache.hadoop.hbase.io.hfile.CacheConfig;
-069import 
org.apache.hadoop.hbase.io.hfile.HFile;
-070import 
org.apache.hadoop.hbase.io.hfile.HFileContext;
-071import 
org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
-072import 
org.apache.hadoop.hbase.io.hfile.HFileWriterImpl;
-073import 
org.apache.hadoop.hbase.regionserver.BloomType;
-074import 
org.apache.hadoop.hbase.regionserver.HStore;
-075import 
org.apache.hadoop.hbase.regionserver.StoreFileWriter;
-076import 
org.apache.hadoop.hbase.util.Bytes;
-077import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-078import 
org.apache.hadoop.hbase.util.FSUtils;
-079import 
org.apache.hadoop.hbase.util.MapReduceCell;
-080import 
org.apache.hadoop.io.NullWritable;
-081import 
org.apache.hadoop.io.SequenceFile;
-082import org.apache.hadoop.io.Text;
-083import org.apache.hadoop.mapreduce.Job;
-084import 
org.apache.hadoop.mapreduce.OutputCommitter;
-085import 
org.apache.hadoop.mapreduce.OutputFormat;
-086import 
org.apache.hadoop.mapreduce.RecordWriter;
-087import 
org.apache.hadoop.mapreduce.TaskAttemptContext;
-088import 
org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
-089import 
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-090import 
org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
-091import 
org.apache.yetus.audience.InterfaceAudience;
-092import org.slf4j.Logger;
-093import org.slf4j.LoggerFactory;
+053import 
org.apache.hadoop.hbase.TableName;
+054import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+055import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+056import 
org.apache.hadoop.hbase.client.Connection;
+057import 

[21/51] [partial] hbase-site git commit: Published site at .

2017-12-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d2b28a1a/devapidocs/org/apache/hadoop/hbase/security/SaslUtil.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/security/SaslUtil.html 
b/devapidocs/org/apache/hadoop/hbase/security/SaslUtil.html
index e3c9757..512237b 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/SaslUtil.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/SaslUtil.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class SaslUtil
+public class SaslUtil
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -258,7 +258,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 LOG
-private static finalorg.slf4j.Logger LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -267,7 +267,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 SASL_DEFAULT_REALM
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String SASL_DEFAULT_REALM
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String SASL_DEFAULT_REALM
 
 See Also:
 Constant
 Field Values
@@ -280,7 +280,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 SWITCH_TO_SIMPLE_AUTH
-public static finalint SWITCH_TO_SIMPLE_AUTH
+public static finalint SWITCH_TO_SIMPLE_AUTH
 
 See Also:
 Constant
 Field Values
@@ -301,7 +301,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 SaslUtil
-publicSaslUtil()
+publicSaslUtil()
 
 
 
@@ -318,7 +318,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 splitKerberosName
-public statichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]splitKerberosName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringfullName)
+public statichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]splitKerberosName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringfullName)
 Splitting fully qualified Kerberos name into parts
 
 
@@ -328,7 +328,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 encodeIdentifier
-statichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringencodeIdentifier(byte[]identifier)
+statichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringencodeIdentifier(byte[]identifier)
 
 
 
@@ -337,7 +337,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 decodeIdentifier
-staticbyte[]decodeIdentifier(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringidentifier)
+staticbyte[]decodeIdentifier(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringidentifier)
 
 
 
@@ -346,7 +346,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 encodePassword
-staticchar[]encodePassword(byte[]password)
+staticchar[]encodePassword(byte[]password)
 
 
 
@@ -355,7 +355,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getQop
-public staticSaslUtil.QualityOfProtectiongetQop(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringstringQop)
+public staticSaslUtil.QualityOfProtectiongetQop(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringstringQop)
 Returns SaslUtil.QualityOfProtection
  corresponding to the given stringQop value.
 
@@ -370,7 +370,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 initSaslProperties
-public statichttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringinitSaslProperties(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringrpcProtection)
+public 

hbase git commit: HBASE-19609 Fixed Checkstyle errors in hbase-metrics and enabled Checkstyle to fail on violations

2017-12-27 Thread janh
Repository: hbase
Updated Branches:
  refs/heads/master 3ed68fd70 -> 6d9081b86


HBASE-19609 Fixed Checkstyle errors in hbase-metrics and enabled Checkstyle to 
fail on violations


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6d9081b8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6d9081b8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6d9081b8

Branch: refs/heads/master
Commit: 6d9081b8645f9b0ff4cd4bc50129a585d5518ef4
Parents: 3ed68fd
Author: Jan Hentschel 
Authored: Sun Dec 24 13:27:25 2017 +0100
Committer: Jan Hentschel 
Committed: Wed Dec 27 17:34:13 2017 +0100

--
 hbase-metrics/pom.xml   | 16 
 .../hadoop/hbase/metrics/impl/CounterImpl.java  |  2 +-
 .../hadoop/hbase/metrics/impl/DropwizardMeter.java  |  4 ++--
 .../hbase/metrics/impl/FastLongHistogram.java   |  4 ++--
 .../hadoop/hbase/metrics/impl/HistogramImpl.java|  4 +---
 .../hbase/metrics/impl/MetricRegistriesImpl.java|  4 +---
 .../metrics/impl/MetricRegistryFactoryImpl.java |  4 +---
 .../hbase/metrics/impl/MetricRegistryImpl.java  |  2 +-
 .../hadoop/hbase/metrics/impl/RefCountingMap.java   |  4 +---
 .../apache/hadoop/hbase/metrics/impl/TimerImpl.java |  2 +-
 .../hbase/metrics/impl/TestDropwizardMeter.java |  4 ++--
 .../hbase/metrics/impl/TestRefCountingMap.java  |  8 +++-
 12 files changed, 32 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6d9081b8/hbase-metrics/pom.xml
--
diff --git a/hbase-metrics/pom.xml b/hbase-metrics/pom.xml
index f82ad54..b3ba39d 100644
--- a/hbase-metrics/pom.xml
+++ b/hbase-metrics/pom.xml
@@ -68,6 +68,22 @@
   
 
   
+  
+org.apache.maven.plugins
+maven-checkstyle-plugin
+
+  
+checkstyle
+validate
+
+  check
+
+
+  true
+
+  
+
+  
 
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/6d9081b8/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/CounterImpl.java
--
diff --git 
a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/CounterImpl.java
 
b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/CounterImpl.java
index 848aa5b..8021c06 100644
--- 
a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/CounterImpl.java
+++ 
b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/CounterImpl.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.metrics.impl;
 
 import java.util.concurrent.atomic.LongAdder;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.Counter;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Custom implementation of {@link org.apache.hadoop.hbase.metrics.Counter} 
using LongAdder.

http://git-wip-us.apache.org/repos/asf/hbase/blob/6d9081b8/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/DropwizardMeter.java
--
diff --git 
a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/DropwizardMeter.java
 
b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/DropwizardMeter.java
index 8b3bb80..b5c52cf 100644
--- 
a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/DropwizardMeter.java
+++ 
b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/DropwizardMeter.java
@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.hbase.metrics.impl;
 
+import com.codahale.metrics.Meter;
+
 import java.util.Objects;
 
 import org.apache.yetus.audience.InterfaceAudience;
 
-import com.codahale.metrics.Meter;
-
 /**
  * Dropwizard metrics implementation of {@link 
org.apache.hadoop.hbase.metrics.Meter}.
  */

http://git-wip-us.apache.org/repos/asf/hbase/blob/6d9081b8/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
--
diff --git 
a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
 
b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
index a341d21..69f4ae5 100644
--- 
a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
+++ 
b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
@@ -22,10 +22,10 @@ import java.util.concurrent.atomic.AtomicLong;
 import 

hbase git commit: HBASE-19609 Fixed Checkstyle errors in hbase-metrics and enabled Checkstyle to fail on violations

2017-12-27 Thread janh
Repository: hbase
Updated Branches:
  refs/heads/branch-2 2468f3002 -> b9f44bcfe


HBASE-19609 Fixed Checkstyle errors in hbase-metrics and enabled Checkstyle to 
fail on violations


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b9f44bcf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b9f44bcf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b9f44bcf

Branch: refs/heads/branch-2
Commit: b9f44bcfe7dd74847bf0c06c083a851b65d278d0
Parents: 2468f30
Author: Jan Hentschel 
Authored: Sun Dec 24 13:27:25 2017 +0100
Committer: Jan Hentschel 
Committed: Wed Dec 27 17:39:42 2017 +0100

--
 hbase-metrics/pom.xml   | 16 
 .../hadoop/hbase/metrics/impl/CounterImpl.java  |  2 +-
 .../hadoop/hbase/metrics/impl/DropwizardMeter.java  |  4 ++--
 .../hbase/metrics/impl/FastLongHistogram.java   |  4 ++--
 .../hadoop/hbase/metrics/impl/HistogramImpl.java|  4 +---
 .../hbase/metrics/impl/MetricRegistriesImpl.java|  4 +---
 .../metrics/impl/MetricRegistryFactoryImpl.java |  4 +---
 .../hbase/metrics/impl/MetricRegistryImpl.java  |  2 +-
 .../hadoop/hbase/metrics/impl/RefCountingMap.java   |  4 +---
 .../apache/hadoop/hbase/metrics/impl/TimerImpl.java |  2 +-
 .../hbase/metrics/impl/TestDropwizardMeter.java |  4 ++--
 .../hbase/metrics/impl/TestRefCountingMap.java  |  8 +++-
 12 files changed, 32 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b9f44bcf/hbase-metrics/pom.xml
--
diff --git a/hbase-metrics/pom.xml b/hbase-metrics/pom.xml
index 0f5ac2a..3107d75 100644
--- a/hbase-metrics/pom.xml
+++ b/hbase-metrics/pom.xml
@@ -68,6 +68,22 @@
   
 
   
+  
+org.apache.maven.plugins
+maven-checkstyle-plugin
+
+  
+checkstyle
+validate
+
+  check
+
+
+  true
+
+  
+
+  
 
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b9f44bcf/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/CounterImpl.java
--
diff --git 
a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/CounterImpl.java
 
b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/CounterImpl.java
index 848aa5b..8021c06 100644
--- 
a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/CounterImpl.java
+++ 
b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/CounterImpl.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.metrics.impl;
 
 import java.util.concurrent.atomic.LongAdder;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.Counter;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Custom implementation of {@link org.apache.hadoop.hbase.metrics.Counter} 
using LongAdder.

http://git-wip-us.apache.org/repos/asf/hbase/blob/b9f44bcf/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/DropwizardMeter.java
--
diff --git 
a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/DropwizardMeter.java
 
b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/DropwizardMeter.java
index 8b3bb80..b5c52cf 100644
--- 
a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/DropwizardMeter.java
+++ 
b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/DropwizardMeter.java
@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.hbase.metrics.impl;
 
+import com.codahale.metrics.Meter;
+
 import java.util.Objects;
 
 import org.apache.yetus.audience.InterfaceAudience;
 
-import com.codahale.metrics.Meter;
-
 /**
  * Dropwizard metrics implementation of {@link 
org.apache.hadoop.hbase.metrics.Meter}.
  */

http://git-wip-us.apache.org/repos/asf/hbase/blob/b9f44bcf/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
--
diff --git 
a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
 
b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
index a341d21..69f4ae5 100644
--- 
a/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
+++ 
b/hbase-metrics/src/main/java/org/apache/hadoop/hbase/metrics/impl/FastLongHistogram.java
@@ -22,10 +22,10 @@ import java.util.concurrent.atomic.AtomicLong;
 import 

hbase git commit: HBASE-19619 Modify replication_admin.rb to use ReplicationPeerConfigBuilder

2017-12-27 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-2 fbe633f14 -> ccc3b2bbc


HBASE-19619 Modify replication_admin.rb to use ReplicationPeerConfigBuilder


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ccc3b2bb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ccc3b2bb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ccc3b2bb

Branch: refs/heads/branch-2
Commit: ccc3b2bbc0602d6ea4925176dd9ceb35cedd4fd9
Parents: fbe633f
Author: tedyu 
Authored: Wed Dec 27 02:37:02 2017 -0800
Committer: tedyu 
Committed: Wed Dec 27 02:37:02 2017 -0800

--
 .../src/main/ruby/hbase/replication_admin.rb| 51 
 1 file changed, 30 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ccc3b2bb/hbase-shell/src/main/ruby/hbase/replication_admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/replication_admin.rb 
b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
index f80c547..b9d4a0c 100644
--- a/hbase-shell/src/main/ruby/hbase/replication_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
@@ -66,22 +66,22 @@ module Hbase
 peer_state = args.fetch(STATE, nil)
 
 # Create and populate a ReplicationPeerConfig
-replication_peer_config = ReplicationPeerConfig.new
-replication_peer_config.set_cluster_key(cluster_key)
+builder = org.apache.hadoop.hbase.replication.ReplicationPeerConfig
+  .newBuilder()
+builder.set_cluster_key(cluster_key)
 
 unless endpoint_classname.nil?
-  
replication_peer_config.set_replication_endpoint_impl(endpoint_classname)
+  builder.set_replication_endpoint_impl(endpoint_classname)
 end
 
 unless config.nil?
-  replication_peer_config.get_configuration.put_all(config)
+  builder.putAllConfiguration(config)
 end
 
 unless data.nil?
   # Convert Strings to Bytes for peer_data
-  peer_data = replication_peer_config.get_peer_data
   data.each do |key, val|
-peer_data.put(Bytes.to_bytes(key), Bytes.to_bytes(val))
+builder.putPeerData(Bytes.to_bytes(key), Bytes.to_bytes(val))
   end
 end
 
@@ -90,8 +90,8 @@ module Hbase
   namespaces.each do |n|
 ns_set.add(n)
   end
-  replication_peer_config.setReplicateAllUserTables(false)
-  replication_peer_config.set_namespaces(ns_set)
+  builder.setReplicateAllUserTables(false)
+  builder.set_namespaces(ns_set)
 end
 
 unless table_cfs.nil?
@@ -100,15 +100,15 @@ module Hbase
   table_cfs.each do |key, val|
 map.put(org.apache.hadoop.hbase.TableName.valueOf(key), val)
   end
-  replication_peer_config.setReplicateAllUserTables(false)
-  replication_peer_config.set_table_cfs_map(map)
+  builder.setReplicateAllUserTables(false)
+  builder.set_table_cfs_map(map)
 end
 
 enabled = true
 unless peer_state.nil?
   enabled = false if peer_state == 'DISABLED'
 end
-@admin.addReplicationPeer(id, replication_peer_config, enabled)
+@admin.addReplicationPeer(id, builder.build, enabled)
   else
 raise(ArgumentError, 'args must be a Hash')
   end
@@ -220,13 +220,18 @@ module Hbase
   unless namespaces.nil?
 rpc = get_peer_config(id)
 unless rpc.nil?
-  ns_set = rpc.getNamespaces
-  ns_set = java.util.HashSet.new if ns_set.nil?
+  if rpc.getNamespaces.nil?
+ns_set = java.util.HashSet.new
+  else
+ns_set = java.util.HashSet.new(rpc.getNamespaces)
+  end
   namespaces.each do |n|
 ns_set.add(n)
   end
-  rpc.setNamespaces(ns_set)
-  @admin.updateReplicationPeerConfig(id, rpc)
+  builder = org.apache.hadoop.hbase.replication.ReplicationPeerConfig
+.newBuilder(rpc)
+  builder.setNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
 end
   end
 end
@@ -238,12 +243,15 @@ module Hbase
 unless rpc.nil?
   ns_set = rpc.getNamespaces
   unless ns_set.nil?
+ns_set = java.util.HashSet.new(ns_set)
 namespaces.each do |n|
   ns_set.remove(n)
 end
   end
-  rpc.setNamespaces(ns_set)
-  @admin.updateReplicationPeerConfig(id, rpc)
+  builder = org.apache.hadoop.hbase.replication.ReplicationPeerConfig
+.newBuilder(rpc)
+  builder.setNamespaces(ns_set)
+  

hbase git commit: HBASE-19619 Modify replication_admin.rb to use ReplicationPeerConfigBuilder

2017-12-27 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 3317b8738 -> 41c2dd04d


HBASE-19619 Modify replication_admin.rb to use ReplicationPeerConfigBuilder


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/41c2dd04
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/41c2dd04
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/41c2dd04

Branch: refs/heads/master
Commit: 41c2dd04da21bb76208f04af104df2e2f444970d
Parents: 3317b87
Author: tedyu 
Authored: Wed Dec 27 02:35:41 2017 -0800
Committer: tedyu 
Committed: Wed Dec 27 02:35:41 2017 -0800

--
 .../src/main/ruby/hbase/replication_admin.rb| 51 
 1 file changed, 30 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/41c2dd04/hbase-shell/src/main/ruby/hbase/replication_admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/replication_admin.rb 
b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
index f80c547..b9d4a0c 100644
--- a/hbase-shell/src/main/ruby/hbase/replication_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
@@ -66,22 +66,22 @@ module Hbase
 peer_state = args.fetch(STATE, nil)
 
 # Create and populate a ReplicationPeerConfig
-replication_peer_config = ReplicationPeerConfig.new
-replication_peer_config.set_cluster_key(cluster_key)
+builder = org.apache.hadoop.hbase.replication.ReplicationPeerConfig
+  .newBuilder()
+builder.set_cluster_key(cluster_key)
 
 unless endpoint_classname.nil?
-  
replication_peer_config.set_replication_endpoint_impl(endpoint_classname)
+  builder.set_replication_endpoint_impl(endpoint_classname)
 end
 
 unless config.nil?
-  replication_peer_config.get_configuration.put_all(config)
+  builder.putAllConfiguration(config)
 end
 
 unless data.nil?
   # Convert Strings to Bytes for peer_data
-  peer_data = replication_peer_config.get_peer_data
   data.each do |key, val|
-peer_data.put(Bytes.to_bytes(key), Bytes.to_bytes(val))
+builder.putPeerData(Bytes.to_bytes(key), Bytes.to_bytes(val))
   end
 end
 
@@ -90,8 +90,8 @@ module Hbase
   namespaces.each do |n|
 ns_set.add(n)
   end
-  replication_peer_config.setReplicateAllUserTables(false)
-  replication_peer_config.set_namespaces(ns_set)
+  builder.setReplicateAllUserTables(false)
+  builder.set_namespaces(ns_set)
 end
 
 unless table_cfs.nil?
@@ -100,15 +100,15 @@ module Hbase
   table_cfs.each do |key, val|
 map.put(org.apache.hadoop.hbase.TableName.valueOf(key), val)
   end
-  replication_peer_config.setReplicateAllUserTables(false)
-  replication_peer_config.set_table_cfs_map(map)
+  builder.setReplicateAllUserTables(false)
+  builder.set_table_cfs_map(map)
 end
 
 enabled = true
 unless peer_state.nil?
   enabled = false if peer_state == 'DISABLED'
 end
-@admin.addReplicationPeer(id, replication_peer_config, enabled)
+@admin.addReplicationPeer(id, builder.build, enabled)
   else
 raise(ArgumentError, 'args must be a Hash')
   end
@@ -220,13 +220,18 @@ module Hbase
   unless namespaces.nil?
 rpc = get_peer_config(id)
 unless rpc.nil?
-  ns_set = rpc.getNamespaces
-  ns_set = java.util.HashSet.new if ns_set.nil?
+  if rpc.getNamespaces.nil?
+ns_set = java.util.HashSet.new
+  else
+ns_set = java.util.HashSet.new(rpc.getNamespaces)
+  end
   namespaces.each do |n|
 ns_set.add(n)
   end
-  rpc.setNamespaces(ns_set)
-  @admin.updateReplicationPeerConfig(id, rpc)
+  builder = org.apache.hadoop.hbase.replication.ReplicationPeerConfig
+.newBuilder(rpc)
+  builder.setNamespaces(ns_set)
+  @admin.updateReplicationPeerConfig(id, builder.build)
 end
   end
 end
@@ -238,12 +243,15 @@ module Hbase
 unless rpc.nil?
   ns_set = rpc.getNamespaces
   unless ns_set.nil?
+ns_set = java.util.HashSet.new(ns_set)
 namespaces.each do |n|
   ns_set.remove(n)
 end
   end
-  rpc.setNamespaces(ns_set)
-  @admin.updateReplicationPeerConfig(id, rpc)
+  builder = org.apache.hadoop.hbase.replication.ReplicationPeerConfig
+.newBuilder(rpc)
+  builder.setNamespaces(ns_set)
+  

hbase git commit: HBASE-19545 Replace getBytes(StandardCharsets.UTF_8) with Bytes.toBytes

2017-12-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 41c2dd04d -> 7145d9818


HBASE-19545 Replace getBytes(StandardCharsets.UTF_8) with Bytes.toBytes

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7145d981
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7145d981
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7145d981

Branch: refs/heads/master
Commit: 7145d98182fb95f6f1c1119c3e779eed0bc322bb
Parents: 41c2dd0
Author: Peter Somogyi 
Authored: Wed Dec 20 21:17:52 2017 +0100
Committer: Chia-Ping Tsai 
Committed: Wed Dec 27 20:07:25 2017 +0800

--
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  7 +-
 .../apache/hadoop/hbase/security/SaslUtil.java  |  3 +-
 .../hadoop/hbase/TestHColumnDescriptor.java |  8 +--
 .../hadoop/hbase/client/TestAsyncProcess.java   | 13 ++--
 .../hadoop/hbase/client/TestClientScanner.java  | 26 +++
 .../hadoop/hbase/client/TestDelayingRunner.java |  9 +--
 .../hadoop/hbase/client/TestOperation.java  | 76 +++-
 .../client/TestSimpleRequestController.java |  7 +-
 .../hbase/security/TestHBaseSaslRpcClient.java  | 27 +++
 .../org/apache/hadoop/hbase/util/Base64.java|  2 +-
 .../org/apache/hadoop/hbase/TestCellUtil.java   | 31 
 .../org/apache/hadoop/hbase/TestTableName.java  |  7 +-
 .../io/crypto/TestKeyStoreKeyProvider.java  |  4 +-
 .../apache/hadoop/hbase/types/TestStruct.java   | 14 ++--
 .../hbase/util/TestLoadTestKVGenerator.java |  9 ++-
 .../hadoop/hbase/util/TestOrderedBytes.java | 33 +
 .../hbase/client/example/HttpProxyExample.java  |  3 +-
 .../hbase/mapreduce/HFileOutputFormat2.java | 13 ++--
 18 files changed, 120 insertions(+), 172 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7145d981/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index af3916d..63310e6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -23,7 +23,6 @@ import com.google.protobuf.RpcController;
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.InterruptedIOException;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
@@ -1675,8 +1674,8 @@ public class HBaseAdmin implements Admin {
 byte[][] encodedNameofRegionsToMerge = new 
byte[nameofRegionsToMerge.length][];
 for(int i = 0; i < nameofRegionsToMerge.length; i++) {
   encodedNameofRegionsToMerge[i] = 
HRegionInfo.isEncodedRegionName(nameofRegionsToMerge[i]) ?
-nameofRegionsToMerge[i] : 
HRegionInfo.encodeRegionName(nameofRegionsToMerge[i])
-  .getBytes(StandardCharsets.UTF_8);
+  nameofRegionsToMerge[i] :
+  Bytes.toBytes(HRegionInfo.encodeRegionName(nameofRegionsToMerge[i]));
 }
 
 TableName tableName = null;
@@ -1774,7 +1773,7 @@ public class HBaseAdmin implements Admin {
   public Future splitRegionAsync(byte[] regionName, byte[] splitPoint)
   throws IOException {
 byte[] encodedNameofRegionToSplit = 
HRegionInfo.isEncodedRegionName(regionName) ?
-regionName : 
HRegionInfo.encodeRegionName(regionName).getBytes(StandardCharsets.UTF_8);
+regionName : Bytes.toBytes(HRegionInfo.encodeRegionName(regionName));
 Pair pair = getRegion(regionName);
 if (pair != null) {
   if (pair.getFirst() != null &&

http://git-wip-us.apache.org/repos/asf/hbase/blob/7145d981/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index d37abdf..7091df5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -28,6 +28,7 @@ import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -73,7 +74,7 @@ public class SaslUtil {
   }
 
   static byte[] decodeIdentifier(String identifier) {
-return 

hbase git commit: HBASE-19545 Replace getBytes(StandardCharsets.UTF_8) with Bytes.toBytes

2017-12-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 ccc3b2bbc -> 15ed74828


HBASE-19545 Replace getBytes(StandardCharsets.UTF_8) with Bytes.toBytes

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/15ed7482
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/15ed7482
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/15ed7482

Branch: refs/heads/branch-2
Commit: 15ed7482861ace64d09c8bc6fa9f656feeec2a1f
Parents: ccc3b2b
Author: Peter Somogyi 
Authored: Wed Dec 20 21:17:52 2017 +0100
Committer: Chia-Ping Tsai 
Committed: Wed Dec 27 20:10:50 2017 +0800

--
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  7 +-
 .../apache/hadoop/hbase/security/SaslUtil.java  |  3 +-
 .../hadoop/hbase/TestHColumnDescriptor.java |  8 +--
 .../hadoop/hbase/client/TestAsyncProcess.java   | 13 ++--
 .../hadoop/hbase/client/TestClientScanner.java  | 26 +++
 .../hadoop/hbase/client/TestDelayingRunner.java |  9 +--
 .../hadoop/hbase/client/TestOperation.java  | 76 +++-
 .../client/TestSimpleRequestController.java |  7 +-
 .../hbase/security/TestHBaseSaslRpcClient.java  | 27 +++
 .../org/apache/hadoop/hbase/util/Base64.java|  2 +-
 .../org/apache/hadoop/hbase/TestCellUtil.java   | 31 
 .../org/apache/hadoop/hbase/TestTableName.java  |  7 +-
 .../io/crypto/TestKeyStoreKeyProvider.java  |  4 +-
 .../apache/hadoop/hbase/types/TestStruct.java   | 14 ++--
 .../hbase/util/TestLoadTestKVGenerator.java |  9 ++-
 .../hadoop/hbase/util/TestOrderedBytes.java | 33 +
 .../hbase/client/example/HttpProxyExample.java  |  3 +-
 .../hbase/mapreduce/HFileOutputFormat2.java | 13 ++--
 18 files changed, 120 insertions(+), 172 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/15ed7482/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index af3916d..63310e6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -23,7 +23,6 @@ import com.google.protobuf.RpcController;
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.InterruptedIOException;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumSet;
@@ -1675,8 +1674,8 @@ public class HBaseAdmin implements Admin {
 byte[][] encodedNameofRegionsToMerge = new 
byte[nameofRegionsToMerge.length][];
 for(int i = 0; i < nameofRegionsToMerge.length; i++) {
   encodedNameofRegionsToMerge[i] = 
HRegionInfo.isEncodedRegionName(nameofRegionsToMerge[i]) ?
-nameofRegionsToMerge[i] : 
HRegionInfo.encodeRegionName(nameofRegionsToMerge[i])
-  .getBytes(StandardCharsets.UTF_8);
+  nameofRegionsToMerge[i] :
+  Bytes.toBytes(HRegionInfo.encodeRegionName(nameofRegionsToMerge[i]));
 }
 
 TableName tableName = null;
@@ -1774,7 +1773,7 @@ public class HBaseAdmin implements Admin {
   public Future splitRegionAsync(byte[] regionName, byte[] splitPoint)
   throws IOException {
 byte[] encodedNameofRegionToSplit = 
HRegionInfo.isEncodedRegionName(regionName) ?
-regionName : 
HRegionInfo.encodeRegionName(regionName).getBytes(StandardCharsets.UTF_8);
+regionName : Bytes.toBytes(HRegionInfo.encodeRegionName(regionName));
 Pair pair = getRegion(regionName);
 if (pair != null) {
   if (pair.getFirst() != null &&

http://git-wip-us.apache.org/repos/asf/hbase/blob/15ed7482/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index d37abdf..7091df5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -28,6 +28,7 @@ import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -73,7 +74,7 @@ public class SaslUtil {
   }
 
   static byte[] decodeIdentifier(String identifier) {
-

hbase git commit: HBASE-19545 Replace getBytes(StandardCharsets.UTF_8) with Bytes.toBytes

2017-12-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1 c3bf558b6 -> 528eb1082


HBASE-19545 Replace getBytes(StandardCharsets.UTF_8) with Bytes.toBytes

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/528eb108
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/528eb108
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/528eb108

Branch: refs/heads/branch-1
Commit: 528eb10826503d3ba6d7f8305039e25775f123ed
Parents: c3bf558
Author: Peter Somogyi 
Authored: Thu Dec 21 15:41:14 2017 +0100
Committer: Chia-Ping Tsai 
Committed: Wed Dec 27 20:15:01 2017 +0800

--
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  5 +-
 .../apache/hadoop/hbase/security/SaslUtil.java  |  3 +-
 .../hadoop/hbase/TestHColumnDescriptor.java |  7 +-
 .../hadoop/hbase/client/TestAsyncProcess.java   | 26 +++---
 .../hadoop/hbase/client/TestClientScanner.java  | 30 +++
 .../hadoop/hbase/client/TestDelayingRunner.java |  9 +-
 .../hadoop/hbase/client/TestOperation.java  | 89 
 .../hbase/security/TestHBaseSaslRpcClient.java  | 23 ++---
 .../org/apache/hadoop/hbase/util/Base64.java| 12 +--
 .../org/apache/hadoop/hbase/TestCellUtil.java   | 33 
 .../io/crypto/TestKeyStoreKeyProvider.java  |  4 +-
 .../apache/hadoop/hbase/types/TestStruct.java   | 16 ++--
 .../hbase/util/TestLoadTestKVGenerator.java | 12 ++-
 .../hadoop/hbase/util/TestOrderedBytes.java | 63 +++---
 .../hadoop/hbase/rest/TestGzipFilter.java   |  5 +-
 .../hbase/rest/client/TestXmlParsing.java   |  6 +-
 .../hbase/mapreduce/HFileOutputFormat2.java | 12 ++-
 .../hadoop/hbase/regionserver/HRegion.java  |  2 +-
 .../hbase/regionserver/HRegionServer.java   | 32 +++
 .../hbase/zookeeper/MiniZooKeeperCluster.java   | 15 ++--
 20 files changed, 174 insertions(+), 230 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 051a768..507c6fb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -24,7 +24,6 @@ import com.google.protobuf.ServiceException;
 
 import java.io.IOException;
 import java.io.InterruptedIOException;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -2612,9 +2611,9 @@ public class HBaseAdmin implements Admin {
   final byte[] nameOfRegionB, final boolean forcible)
   throws IOException {
 final byte[] encodedNameOfRegionA = isEncodedRegionName(nameOfRegionA) ? 
nameOfRegionA :
-  
HRegionInfo.encodeRegionName(nameOfRegionA).getBytes(StandardCharsets.UTF_8);
+  Bytes.toBytes(HRegionInfo.encodeRegionName(nameOfRegionA));
 final byte[] encodedNameOfRegionB = isEncodedRegionName(nameOfRegionB) ? 
nameOfRegionB :
-  
HRegionInfo.encodeRegionName(nameOfRegionB).getBytes(StandardCharsets.UTF_8);
+  Bytes.toBytes(HRegionInfo.encodeRegionName(nameOfRegionB));
 
 Pair pair = getRegion(nameOfRegionA);
 if (pair != null && pair.getFirst().getReplicaId() != 
HRegionInfo.DEFAULT_REPLICA_ID)

http://git-wip-us.apache.org/repos/asf/hbase/blob/528eb108/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index b26dcac..1516a6b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -30,6 +30,7 @@ import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.util.Bytes;
 
 @InterfaceAudience.Private
 public class SaslUtil {
@@ -72,7 +73,7 @@ public class SaslUtil {
   }
 
   static byte[] decodeIdentifier(String identifier) {
-return Base64.decodeBase64(identifier.getBytes(StandardCharsets.UTF_8));
+return Base64.decodeBase64(Bytes.toBytes(identifier));
   }
 
   static char[] encodePassword(byte[] password) {


hbase git commit: HBASE-19545 Replace getBytes(StandardCharsets.UTF_8) with Bytes.toBytes

2017-12-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 0ca69fae3 -> b54391313


HBASE-19545 Replace getBytes(StandardCharsets.UTF_8) with Bytes.toBytes

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b5439131
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b5439131
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b5439131

Branch: refs/heads/branch-1.4
Commit: b54391313a10fba83c69ed5d332c0198da6dd95b
Parents: 0ca69fa
Author: Peter Somogyi 
Authored: Thu Dec 21 15:41:14 2017 +0100
Committer: Chia-Ping Tsai 
Committed: Wed Dec 27 20:17:22 2017 +0800

--
 .../apache/hadoop/hbase/client/HBaseAdmin.java  |  5 +-
 .../apache/hadoop/hbase/security/SaslUtil.java  |  3 +-
 .../hadoop/hbase/TestHColumnDescriptor.java |  7 +-
 .../hadoop/hbase/client/TestAsyncProcess.java   | 26 +++---
 .../hadoop/hbase/client/TestClientScanner.java  | 30 +++
 .../hadoop/hbase/client/TestDelayingRunner.java |  9 +-
 .../hadoop/hbase/client/TestOperation.java  | 89 
 .../hbase/security/TestHBaseSaslRpcClient.java  | 23 ++---
 .../org/apache/hadoop/hbase/util/Base64.java| 12 +--
 .../org/apache/hadoop/hbase/TestCellUtil.java   | 33 
 .../io/crypto/TestKeyStoreKeyProvider.java  |  4 +-
 .../apache/hadoop/hbase/types/TestStruct.java   | 16 ++--
 .../hbase/util/TestLoadTestKVGenerator.java | 12 ++-
 .../hadoop/hbase/util/TestOrderedBytes.java | 63 +++---
 .../hadoop/hbase/rest/TestGzipFilter.java   |  5 +-
 .../hbase/rest/client/TestXmlParsing.java   |  6 +-
 .../hbase/mapreduce/HFileOutputFormat2.java | 12 ++-
 .../hadoop/hbase/regionserver/HRegion.java  |  2 +-
 .../hbase/regionserver/HRegionServer.java   | 32 +++
 .../hbase/zookeeper/MiniZooKeeperCluster.java   | 15 ++--
 20 files changed, 174 insertions(+), 230 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b5439131/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 051a768..507c6fb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -24,7 +24,6 @@ import com.google.protobuf.ServiceException;
 
 import java.io.IOException;
 import java.io.InterruptedIOException;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -2612,9 +2611,9 @@ public class HBaseAdmin implements Admin {
   final byte[] nameOfRegionB, final boolean forcible)
   throws IOException {
 final byte[] encodedNameOfRegionA = isEncodedRegionName(nameOfRegionA) ? 
nameOfRegionA :
-  
HRegionInfo.encodeRegionName(nameOfRegionA).getBytes(StandardCharsets.UTF_8);
+  Bytes.toBytes(HRegionInfo.encodeRegionName(nameOfRegionA));
 final byte[] encodedNameOfRegionB = isEncodedRegionName(nameOfRegionB) ? 
nameOfRegionB :
-  
HRegionInfo.encodeRegionName(nameOfRegionB).getBytes(StandardCharsets.UTF_8);
+  Bytes.toBytes(HRegionInfo.encodeRegionName(nameOfRegionB));
 
 Pair pair = getRegion(nameOfRegionA);
 if (pair != null && pair.getFirst().getReplicaId() != 
HRegionInfo.DEFAULT_REPLICA_ID)

http://git-wip-us.apache.org/repos/asf/hbase/blob/b5439131/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index b26dcac..1516a6b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -30,6 +30,7 @@ import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.util.Bytes;
 
 @InterfaceAudience.Private
 public class SaslUtil {
@@ -72,7 +73,7 @@ public class SaslUtil {
   }
 
   static byte[] decodeIdentifier(String identifier) {
-return Base64.decodeBase64(identifier.getBytes(StandardCharsets.UTF_8));
+return Base64.decodeBase64(Bytes.toBytes(identifier));
   }
 
   static char[] encodePassword(byte[] password) {


hbase git commit: HBASE-19648 Move branch-2 version from 2.0.0-beta-1-SNAPSHOT to 2.0.0-beta-1

2017-12-27 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 e4ab29ada -> d6d836965


HBASE-19648 Move branch-2 version from 2.0.0-beta-1-SNAPSHOT to 2.0.0-beta-1


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d6d83696
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d6d83696
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d6d83696

Branch: refs/heads/branch-2
Commit: d6d8369655cd8aece188362bb2503428903973d2
Parents: e4ab29a
Author: Michael Stack 
Authored: Wed Dec 27 14:41:19 2017 -0800
Committer: Michael Stack 
Committed: Wed Dec 27 14:41:19 2017 -0800

--
 hbase-annotations/pom.xml| 2 +-
 hbase-archetypes/hbase-archetype-builder/pom.xml | 2 +-
 hbase-archetypes/hbase-client-project/pom.xml| 2 +-
 hbase-archetypes/hbase-shaded-client-project/pom.xml | 2 +-
 hbase-archetypes/pom.xml | 2 +-
 hbase-assembly/pom.xml   | 2 +-
 hbase-build-configuration/pom.xml| 2 +-
 hbase-build-support/hbase-error-prone/pom.xml| 4 ++--
 hbase-build-support/pom.xml  | 2 +-
 hbase-checkstyle/pom.xml | 4 ++--
 hbase-client/pom.xml | 2 +-
 hbase-common/pom.xml | 2 +-
 hbase-endpoint/pom.xml   | 2 +-
 hbase-examples/pom.xml   | 2 +-
 hbase-external-blockcache/pom.xml| 2 +-
 hbase-hadoop-compat/pom.xml  | 2 +-
 hbase-hadoop2-compat/pom.xml | 2 +-
 hbase-http/pom.xml   | 2 +-
 hbase-it/pom.xml | 2 +-
 hbase-mapreduce/pom.xml  | 2 +-
 hbase-metrics-api/pom.xml| 2 +-
 hbase-metrics/pom.xml| 2 +-
 hbase-procedure/pom.xml  | 2 +-
 hbase-protocol-shaded/pom.xml| 2 +-
 hbase-protocol/pom.xml   | 2 +-
 hbase-replication/pom.xml| 2 +-
 hbase-resource-bundle/pom.xml| 2 +-
 hbase-rest/pom.xml   | 2 +-
 hbase-rsgroup/pom.xml| 2 +-
 hbase-server/pom.xml | 2 +-
 hbase-shaded/hbase-shaded-check-invariants/pom.xml   | 2 +-
 hbase-shaded/hbase-shaded-client/pom.xml | 2 +-
 hbase-shaded/hbase-shaded-mapreduce/pom.xml  | 2 +-
 hbase-shaded/pom.xml | 2 +-
 hbase-shell/pom.xml  | 2 +-
 hbase-testing-util/pom.xml   | 2 +-
 hbase-thrift/pom.xml | 2 +-
 hbase-zookeeper/pom.xml  | 2 +-
 pom.xml  | 2 +-
 39 files changed, 41 insertions(+), 41 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d6d83696/hbase-annotations/pom.xml
--
diff --git a/hbase-annotations/pom.xml b/hbase-annotations/pom.xml
index 675e95c..6ad3c12 100644
--- a/hbase-annotations/pom.xml
+++ b/hbase-annotations/pom.xml
@@ -23,7 +23,7 @@
   
 hbase
 org.apache.hbase
-2.0.0-beta-1-SNAPSHOT
+2.0.0-beta-1
 ..
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6d83696/hbase-archetypes/hbase-archetype-builder/pom.xml
--
diff --git a/hbase-archetypes/hbase-archetype-builder/pom.xml 
b/hbase-archetypes/hbase-archetype-builder/pom.xml
index bcc0a83..297bc67 100644
--- a/hbase-archetypes/hbase-archetype-builder/pom.xml
+++ b/hbase-archetypes/hbase-archetype-builder/pom.xml
@@ -25,7 +25,7 @@
   
 hbase-archetypes
 org.apache.hbase
-2.0.0-beta-1-SNAPSHOT
+2.0.0-beta-1
 ..
   
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6d83696/hbase-archetypes/hbase-client-project/pom.xml
--
diff --git a/hbase-archetypes/hbase-client-project/pom.xml 
b/hbase-archetypes/hbase-client-project/pom.xml
index 9c8cd9b..375bc63 100644
--- a/hbase-archetypes/hbase-client-project/pom.xml
+++ b/hbase-archetypes/hbase-client-project/pom.xml
@@ -26,7 +26,7 @@
   
 hbase-archetypes
 org.apache.hbase
-2.0.0-beta-1-SNAPSHOT
+2.0.0-beta-1
 ..
   
   hbase-client-project

http://git-wip-us.apache.org/repos/asf/hbase/blob/d6d83696/hbase-archetypes/hbase-shaded-client-project/pom.xml

hbase git commit: HBASE-19647 Logging cleanups; emit regionname when RegionTooBusyException inside RetriesExhausted... make netty connect/disconnect TRACE-level; ADDENDUM

2017-12-27 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 7dee1bcd3 -> e4ab29ada


HBASE-19647 Logging cleanups; emit regionname when RegionTooBusyException 
inside RetriesExhausted... make netty connect/disconnect TRACE-level; ADDENDUM


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e4ab29ad
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e4ab29ad
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e4ab29ad

Branch: refs/heads/branch-2
Commit: e4ab29ada2d8acccd5b6a4a87ccaf5fbba944ca2
Parents: 7dee1bc
Author: Michael Stack 
Authored: Wed Dec 27 14:14:07 2017 -0800
Committer: Michael Stack 
Committed: Wed Dec 27 14:14:37 2017 -0800

--
 .../hadoop/hbase/RegionTooBusyException.java | 11 +--
 .../RetriesExhaustedWithDetailsException.java|  2 ++
 .../hadoop/hbase/regionserver/HRegion.java   | 19 ++-
 .../hadoop/hbase/client/TestMetaCache.java   |  2 +-
 .../apache/hadoop/hbase/ipc/TestRpcMetrics.java  |  2 +-
 5 files changed, 19 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e4ab29ad/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
index 7b03b45..49431b6 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
@@ -24,21 +24,20 @@ import org.apache.yetus.audience.InterfaceAudience;
 /**
  * Thrown by a region server if it will block and wait to serve a request.
  * For example, the client wants to insert something to a region while the
- * region is compacting.
+ * region is compacting. Keep variance in the passed 'msg' low because its msg 
is used as a key
+ * over in {@link 
org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException}
+ * grouping failure types.
  */
 @InterfaceAudience.Public
 public class RegionTooBusyException extends IOException {
   private static final long serialVersionUID = 1728345723728342L;
 
-  /** default constructor */
-  public RegionTooBusyException() {
-super();
-  }
-
   /**
* Constructor
* @param msg message
*/
+  // Be careful. Keep variance in the passed 'msg' low because its msg is used 
as a key over in
+  // RetriesExhaustedWithDetailsException grouping failure types.
   public RegionTooBusyException(final String msg) {
 super(msg);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/e4ab29ad/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
index e7eda2a..0cce728 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
@@ -157,6 +157,8 @@ extends RetriesExhaustedException {
   if (t instanceof DoNotRetryIOException ||
   t instanceof RegionTooBusyException) {
 // If RegionTooBusyException, print message since it has Region name 
in it.
+// RegionTooBusyException message was edited to remove variance. Has 
regionname, server,
+// and why the exception; no longer has duration it waited on lock nor 
current memsize.
 name = t.getMessage();
   } else {
 name = t.getClass().getSimpleName();

http://git-wip-us.apache.org/repos/asf/hbase/blob/e4ab29ad/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index cde6874..ca1bfd3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -4170,12 +4170,12 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 if (this.memstoreDataSize.get() > this.blockingMemStoreSize) {
   blockedRequestsCount.increment();
   requestFlush();
-  throw new RegionTooBusyException("Over memstore limit, " +
-  

hbase git commit: HBASE-19647 Logging cleanups; emit regionname when RegionTooBusyException inside RetriesExhausted... make netty connect/disconnect TRACE-level; ADDENDUM

2017-12-27 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 01b1f48cc -> 2c65f039e


HBASE-19647 Logging cleanups; emit regionname when RegionTooBusyException 
inside RetriesExhausted... make netty connect/disconnect TRACE-level; ADDENDUM


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2c65f039
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2c65f039
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2c65f039

Branch: refs/heads/master
Commit: 2c65f039eddb6733ad2ac2fb89a8f5427495e243
Parents: 01b1f48
Author: Michael Stack 
Authored: Wed Dec 27 14:14:07 2017 -0800
Committer: Michael Stack 
Committed: Wed Dec 27 14:14:58 2017 -0800

--
 .../hadoop/hbase/RegionTooBusyException.java | 11 +--
 .../RetriesExhaustedWithDetailsException.java|  2 ++
 .../hadoop/hbase/regionserver/HRegion.java   | 19 ++-
 .../hadoop/hbase/client/TestMetaCache.java   |  2 +-
 .../apache/hadoop/hbase/ipc/TestRpcMetrics.java  |  2 +-
 5 files changed, 19 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2c65f039/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
index 7b03b45..49431b6 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
@@ -24,21 +24,20 @@ import org.apache.yetus.audience.InterfaceAudience;
 /**
  * Thrown by a region server if it will block and wait to serve a request.
  * For example, the client wants to insert something to a region while the
- * region is compacting.
+ * region is compacting. Keep variance in the passed 'msg' low because its msg 
is used as a key
+ * over in {@link 
org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException}
+ * grouping failure types.
  */
 @InterfaceAudience.Public
 public class RegionTooBusyException extends IOException {
   private static final long serialVersionUID = 1728345723728342L;
 
-  /** default constructor */
-  public RegionTooBusyException() {
-super();
-  }
-
   /**
* Constructor
* @param msg message
*/
+  // Be careful. Keep variance in the passed 'msg' low because its msg is used 
as a key over in
+  // RetriesExhaustedWithDetailsException grouping failure types.
   public RegionTooBusyException(final String msg) {
 super(msg);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/2c65f039/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
index e7eda2a..0cce728 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
@@ -157,6 +157,8 @@ extends RetriesExhaustedException {
   if (t instanceof DoNotRetryIOException ||
   t instanceof RegionTooBusyException) {
 // If RegionTooBusyException, print message since it has Region name 
in it.
+// RegionTooBusyException message was edited to remove variance. Has 
regionname, server,
+// and why the exception; no longer has duration it waited on lock nor 
current memsize.
 name = t.getMessage();
   } else {
 name = t.getClass().getSimpleName();

http://git-wip-us.apache.org/repos/asf/hbase/blob/2c65f039/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index cde6874..ca1bfd3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -4170,12 +4170,12 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
 if (this.memstoreDataSize.get() > this.blockingMemStoreSize) {
   blockedRequestsCount.increment();
   requestFlush();
-  throw new RegionTooBusyException("Over memstore limit, " +
-  

hbase git commit: HBASE-19652 Turn down CleanerChore logging; too chatty

2017-12-27 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 2c65f039e -> 1050936ce


HBASE-19652 Turn down CleanerChore logging; too chatty


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1050936c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1050936c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1050936c

Branch: refs/heads/master
Commit: 1050936ce7fc8e6466453e64d59597443ae07152
Parents: 2c65f03
Author: Michael Stack 
Authored: Wed Dec 27 20:49:26 2017 -0800
Committer: Michael Stack 
Committed: Wed Dec 27 20:49:26 2017 -0800

--
 .../hadoop/hbase/RegionTooBusyException.java|  4 --
 .../hbase/client/AsyncRequestFutureImpl.java|  4 +-
 .../hbase/master/cleaner/CleanerChore.java  | 54 ++--
 .../hadoop/hbase/regionserver/HRegion.java  |  5 +-
 .../hbase/regionserver/MemStoreFlusher.java |  5 +-
 .../PressureAwareThroughputController.java  |  2 +-
 .../throttle/ThroughputControlUtil.java |  2 +-
 7 files changed, 25 insertions(+), 51 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1050936c/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
index 49431b6..3024962 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/RegionTooBusyException.java
@@ -32,10 +32,6 @@ import org.apache.yetus.audience.InterfaceAudience;
 public class RegionTooBusyException extends IOException {
   private static final long serialVersionUID = 1728345723728342L;
 
-  /**
-   * Constructor
-   * @param msg message
-   */
   // Be careful. Keep variance in the passed 'msg' low because its msg is used 
as a key over in
   // RetriesExhaustedWithDetailsException grouping failure types.
   public RegionTooBusyException(final String msg) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/1050936c/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
index ed1bdb3..d214dca 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncRequestFutureImpl.java
@@ -977,12 +977,12 @@ class AsyncRequestFutureImpl implements 
AsyncRequestFuture {
Throwable error, long backOffTime, boolean 
willRetry, String startTime,
int failed, int stopped) {
 StringBuilder sb = new StringBuilder();
-sb.append("#").append(asyncProcess.id).append(", 
table=").append(tableName).append(", ")
+sb.append("id=").append(asyncProcess.id).append(", 
table=").append(tableName).append(", ")
 .append("attempt=").append(numAttempt)
 .append("/").append(asyncProcess.numTries).append(" ");
 
 if (failureCount > 0 || error != null){
-  sb.append("failed=").append(failureCount).append("ops").append(", last 
exception: ").
+  sb.append("failed=").append(failureCount).append("ops").append(", last 
exception=").
   append(error == null ? "null" : error);
 } else {
   sb.append("succeeded");

http://git-wip-us.apache.org/repos/asf/hbase/blob/1050936c/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
index bc7c82d..abf132c 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/cleaner/CleanerChore.java
@@ -125,7 +125,7 @@ public abstract class CleanerChore extends Schedu
   // but upmost to the number of available processors.
   int size = Math.min(Integer.valueOf(poolSize), AVAIL_PROCESSORS);
   if (size == AVAIL_PROCESSORS) {
-LOG.warn("Use full core processors to scan dir");
+LOG.warn("Use full core processors to scan dir, size={}" + size);
   }
   return size;
 } else if (poolSize.matches("0.[0-9]+|1.0")) {
@@ -157,7 +157,7 @@ public abstract class 

  1   2   >