svn commit: r23423 - /dev/hbase/hbase-1.4.0RC0/

2017-12-06 Thread apurtell
Author: apurtell
Date: Thu Dec  7 02:41:37 2017
New Revision: 23423

Log:
Restage HBase 1.4.0 RC0

Modified:
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.asc
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.md5
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.sha
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.asc
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.md5
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.sha

Modified: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz
==
Binary files - no diff available.

Modified: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.asc
==
--- dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.asc (original)
+++ dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.asc Thu Dec  7 02:41:37 2017
@@ -1,16 +1,16 @@
 -BEGIN PGP SIGNATURE-
 
-iQIzBAABCAAdFiEEUPHou3xnqxS9/AohhZd1TdU2XM0FAloogf8ACgkQhZd1TdU2
-XM3PcA/+K8tMPn2rgvfA0Vk2aOFJFvEncXUGbsY6qvQtb8I2doGarfDbn1YUPKTS
-KeZ4+QIptOOzJ4mbOAGf4JT2yVUc/102oPtFKuHR/zkowv3PQv5R3f1jF/a46WY7
-4y2D6Y4BfQz9hH0WKK0kL0kimRLaycBsEMoyy5YCUN+CQS0r8UHLkkiaOiM+zu22
-AjwM629Hlv+Xp0sacx+FXLz3cBTVtylabZ0W2PQEJofYgjVk+JbuhyhOD+yToYcu
-OjtrSI2QQ8E6w9YMB6jIjvJSSO0jSFhsfAwMFAmZjfjJwZ00M+3q1lPc2C0TPHEL
-xahYKFrElg3ary2jnqiLYUn1X5a63Q6qQ9snsFtpRr5Gh+A3q4xtkG5piZMJnaOF
-1kyJ+QYrX2A4pl4BUOa76uiGZywk0pPCrDYzHFnwCqMB4ka/yKSI1kizezlED65i
-1mREfcLP4FvDbsJAeHDQo4xIia827dqmns/X3vP1PWQGfCP1sB9bN+zaZXGFQFHa
-CpVXvSoDrBrUn+JWpO0KsbWYKH1a+w8olNXTCEu/0O05B7cdOcnuPwrCHsZ0pRB1
-O/KC4hgJtrVRCFksMWS6NDPvLVJIOkUu4smq6EeLvDmqOR/8Jc7qWjfexbH2rz9T
-c+K6m782nKvFeQ4ks5HY9iwPncz/2pcKJMnhZRkAx8YcvGHF6B8=
-=bqg8
+iQIzBAABCAAdFiEEUPHou3xnqxS9/AohhZd1TdU2XM0FAlooqR0ACgkQhZd1TdU2
+XM3aSA//QnfDgKOlUGLEtOmKC8wUzeTO+J6y3z37VOeuWfjsSBhQvCEnTddhGe8R
++ZyopgHOt5fgF9LIFInLRoHS/AVk0OblDaSY9oZHytIcmTnY4EL1BERr2Y3J6Sqd
+xYymkkMu8DucwpxesjqbiSi6ERtkaCb8xVP6uC96GGVyIItJlbPZ5UIq8LjPoS1B
+o8OyUYX8ohaVYJK7/UdYauqfYU0k3TVZUiLChP4CDQBc64JpnqN+b/jWs/MnRbMq
+tAZ8YroW85jU43M+UZnUJ41A8Z5YteblAB/VMCiaFNkq0ZHjTKIevu7s3EJa6Bgr
++pgBoUABZQS8UD77x6VJKPzR4kcvtrJuJ1nxcLIi7h/g7KftLMlLd5SAUJhQtbwV
+CKCxwT4ujH1zSFtIf8/uLhXiNq8xDyMwanTh1AqNwAMNpgrc4ZmtS0l1OYcnchev
+dEPrQvI0LD7VlI4/oXEnIavpn0iAdJz08+b6BnPvB/o3FI/cm/IX+yShCg/V5xKf
+FN5WPCi2ThT+mOFdiKg4z7vZ+5Gnb5ArxfZAIQU6AzphvGJ2W17/DFUqvCkfRFX1
+gZ6uNy+lol5Riyw+HyddMVrIJ5SOJ4Vtl352DQ6rxj2ElVQcNFan+gHvLhHLzvrv
+fkfbdIvmA/YItAWoNzm/eorOExPPq+rf3YzLxFAbHPBkhEprJmM=
+=WEuB
 -END PGP SIGNATURE-

Modified: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.md5
==
--- dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.md5 (original)
+++ dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.md5 Thu Dec  7 02:41:37 2017
@@ -1 +1 @@
-hbase-1.4.0-bin.tar.gz: AF 33 98 CE 9F D1 E2 BA  D8 54 0E 7B 0E 72 B1 A1
+hbase-1.4.0-bin.tar.gz: CF 1D 0E 71 11 90 81 13  53 B9 9D 81 C5 86 E4 BC

Modified: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.sha
==
--- dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.sha (original)
+++ dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.sha Thu Dec  7 02:41:37 2017
@@ -1,3 +1,3 @@
-hbase-1.4.0-bin.tar.gz: 3C3699EE 27FEF0CF 64D88201 44BD0988 F700E76B 07BF7C69
-235E8697 8EEE6305 520EB712 4B0AA119 D39AA1AB D5D503F6
-B8A66354 39935E4A FC31FAEC 6EB9F885
+hbase-1.4.0-bin.tar.gz: 760B674E 935C4D5C EDA7237F 5FE326BC 84AD618D 6BA7C764
+E2EC0859 41311DC9 CDC27CB2 973309D9 086A8A38 67F9E46B
+0642B226 EEA5FA18 BC0B08EA C2187F14

Modified: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz
==
Binary files - no diff available.

Modified: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.asc
==
--- dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.asc (original)
+++ dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.asc Thu Dec  7 02:41:37 2017
@@ -1,16 +1,16 @@
 -BEGIN PGP SIGNATURE-
 
-iQIzBAABCAAdFiEEUPHou3xnqxS9/AohhZd1TdU2XM0FAloogf8ACgkQhZd1TdU2
-XM2AYA//S027Y5423WwlfeSXT9zrs5D5gVVUhyJZnHaGa9B86bi8MA+0wrA8pG7A
-Hj2z5I+fsIAtfgnLm4/FnSvG+3p+elYJs7mR3x4fRyFDoo6cMc6iNM/OTbFkysy5
-KJLzz748ydytojaqF9rP+jSn6AS4zDIuCpGpruXZR2aP5b2XhAMY5YtsdD6zZfG0
-1HbA21gHrTu/GbZRH/OXTlGgMs6YtCUPCHue/tJihrcbmBtX4VsB6b54Sp67A128
-kzAf1cGdoNvWGZT4d0NgCNo4v8Eo6tvJLnBCejAg3rPlTh6eKu3FZRdTwPAO8RKg
-UAwePRejteBrtULWdtHfpghAYWfWRXN4K9UR5bFkJPTGGWhMvhiGOgeyioLN1NDa
-zdMRIWigP1SiXxgwwLcYbhhgB4xfau5bYOr22h7fkYCVoWBun0cD13ppUCe/3/HN
-8B/Y209jzMHTx7GGUDpAYBpDsIkRPulsv0pZwIfmiG1PN4FfhzG8C03HBtsm3xeb

hbase git commit: HBASE-19340 Backport missing options in shell

2017-12-06 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 6d8c080b1 -> 2ff895c10


HBASE-19340 Backport missing options in shell

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2ff895c1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2ff895c1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2ff895c1

Branch: refs/heads/branch-1.3
Commit: 2ff895c10aa87baa8604a4b13eec028b18f7bef1
Parents: 6d8c080
Author: zhaoyuan 
Authored: Thu Dec 7 11:38:09 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Dec 7 14:13:07 2017 +0800

--
 hbase-shell/src/main/ruby/hbase/admin.rb  | 27 +++--
 hbase-shell/src/test/ruby/hbase/admin_test.rb | 46 +-
 2 files changed, 68 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2ff895c1/hbase-shell/src/main/ruby/hbase/admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb 
b/hbase-shell/src/main/ruby/hbase/admin.rb
index e6dd672..317acc8 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -362,7 +362,7 @@ module Hbase
 htd.setOwnerString(arg.delete(OWNER)) if arg[OWNER]
 htd.setMaxFileSize(JLong.valueOf(arg.delete(MAX_FILESIZE))) if 
arg[MAX_FILESIZE]
 htd.setReadOnly(JBoolean.valueOf(arg.delete(READONLY))) if 
arg[READONLY]
-htd.setCompactionEnabled(JBoolean.valueOf(arg[COMPACTION_ENABLED])) if 
arg[COMPACTION_ENABLED]
+
htd.setCompactionEnabled(JBoolean.valueOf(arg.delete(COMPACTION_ENABLED))) if 
arg[COMPACTION_ENABLED]
 
htd.setMemStoreFlushSize(JLong.valueOf(arg.delete(MEMSTORE_FLUSHSIZE))) if 
arg[MEMSTORE_FLUSHSIZE]
 # DEFERRED_LOG_FLUSH is deprecated and was replaced by DURABILITY.  To 
keep backward compatible, it still exists.
 # However, it has to be set before DURABILITY so that DURABILITY could 
overwrite if both args are set
@@ -374,6 +374,11 @@ module Hbase
   end
 end
 
htd.setDurability(org.apache.hadoop.hbase.client.Durability.valueOf(arg.delete(DURABILITY)))
 if arg[DURABILITY]
+htd.setPriority(JInteger.valueOf(arg.delete(PRIORITY))) if 
arg[PRIORITY]
+htd.setFlushPolicyClassName(arg.delete(FLUSH_POLICY)) if 
arg[FLUSH_POLICY]
+htd.setRegionSplitPolicyClassName(arg.delete(SPLIT_POLICY)) if 
arg[SPLIT_POLICY]
+
htd.setRegionMemstoreReplication(JBoolean.valueOf(arg.delete(REGION_MEMSTORE_REPLICATION)))
 if arg[REGION_MEMSTORE_REPLICATION]
+
htd.setRegionReplication(JInteger.valueOf(arg.delete(REGION_REPLICATION))) if 
arg[REGION_REPLICATION]
 parse_htd_args(htd, arg)
 set_user_metadata(htd, arg.delete(METADATA)) if arg[METADATA]
 set_descriptor_config(htd, arg.delete(CONFIGURATION)) if 
arg[CONFIGURATION]
@@ -616,7 +621,7 @@ module Hbase
 htd.setOwnerString(arg.delete(OWNER)) if arg[OWNER]
 htd.setMaxFileSize(JLong.valueOf(arg.delete(MAX_FILESIZE))) if 
arg[MAX_FILESIZE]
 htd.setReadOnly(JBoolean.valueOf(arg.delete(READONLY))) if 
arg[READONLY]
-htd.setCompactionEnabled(JBoolean.valueOf(arg[COMPACTION_ENABLED])) if 
arg[COMPACTION_ENABLED]
+
htd.setCompactionEnabled(JBoolean.valueOf(arg.delete(COMPACTION_ENABLED))) if 
arg[COMPACTION_ENABLED]
 parse_htd_args(htd, arg)
 
htd.setMemStoreFlushSize(JLong.valueOf(arg.delete(MEMSTORE_FLUSHSIZE))) if 
arg[MEMSTORE_FLUSHSIZE]
 # DEFERRED_LOG_FLUSH is deprecated and was replaced by DURABILITY.  To 
keep backward compatible, it still exists.
@@ -629,6 +634,10 @@ module Hbase
   end
 end
 
htd.setDurability(org.apache.hadoop.hbase.client.Durability.valueOf(arg.delete(DURABILITY)))
 if arg[DURABILITY]
+htd.setPriority(JInteger.valueOf(arg.delete(PRIORITY))) if 
arg[PRIORITY]
+htd.setFlushPolicyClassName(arg.delete(FLUSH_POLICY)) if 
arg[FLUSH_POLICY]
+htd.setRegionSplitPolicyClassName(arg.delete(SPLIT_POLICY)) if 
arg[SPLIT_POLICY]
+
htd.setRegionMemstoreReplication(JBoolean.valueOf(arg.delete(REGION_MEMSTORE_REPLICATION)))
 if arg[REGION_MEMSTORE_REPLICATION]
 
htd.setRegionReplication(JInteger.valueOf(arg.delete(REGION_REPLICATION))) if 
arg[REGION_REPLICATION]
 set_user_metadata(htd, arg.delete(METADATA)) if arg[METADATA]
 set_descriptor_config(htd, arg.delete(CONFIGURATION)) if 
arg[CONFIGURATION]
@@ -813,6 +822,10 @@ module Hbase
   
family.setBlockCacheEnabled(JBoolean.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::BLOCKCACHE)))
 if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::BLOCKCACHE)
   

hbase git commit: HBASE-19340 Backport missing options in shell

2017-12-06 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 a1e77f605 -> 45e230959


HBASE-19340 Backport missing options in shell

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/45e23095
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/45e23095
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/45e23095

Branch: refs/heads/branch-1.2
Commit: 45e230959bf141550cadd69a4c0a078c6fd1c2ee
Parents: a1e77f6
Author: zhaoyuan 
Authored: Wed Dec 6 14:24:46 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Dec 7 14:13:28 2017 +0800

--
 hbase-shell/src/main/ruby/hbase/admin.rb  | 15 ++--
 hbase-shell/src/test/ruby/hbase/admin_test.rb | 28 +++---
 2 files changed, 38 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/45e23095/hbase-shell/src/main/ruby/hbase/admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb 
b/hbase-shell/src/main/ruby/hbase/admin.rb
index a9e49c5..0fd1267 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -330,7 +330,7 @@ module Hbase
 htd.setOwnerString(arg.delete(OWNER)) if arg[OWNER]
 htd.setMaxFileSize(JLong.valueOf(arg.delete(MAX_FILESIZE))) if 
arg[MAX_FILESIZE]
 htd.setReadOnly(JBoolean.valueOf(arg.delete(READONLY))) if 
arg[READONLY]
-htd.setCompactionEnabled(JBoolean.valueOf(arg[COMPACTION_ENABLED])) if 
arg[COMPACTION_ENABLED]
+
htd.setCompactionEnabled(JBoolean.valueOf(arg.delete(COMPACTION_ENABLED))) if 
arg[COMPACTION_ENABLED]
 
htd.setMemStoreFlushSize(JLong.valueOf(arg.delete(MEMSTORE_FLUSHSIZE))) if 
arg[MEMSTORE_FLUSHSIZE]
 # DEFERRED_LOG_FLUSH is deprecated and was replaced by DURABILITY.  To 
keep backward compatible, it still exists.
 # However, it has to be set before DURABILITY so that DURABILITY could 
overwrite if both args are set
@@ -342,6 +342,10 @@ module Hbase
   end
 end
 
htd.setDurability(org.apache.hadoop.hbase.client.Durability.valueOf(arg.delete(DURABILITY)))
 if arg[DURABILITY]
+htd.setFlushPolicyClassName(arg.delete(FLUSH_POLICY)) if 
arg[FLUSH_POLICY]
+htd.setRegionSplitPolicyClassName(arg.delete(SPLIT_POLICY)) if 
arg[SPLIT_POLICY]
+
htd.setRegionMemstoreReplication(JBoolean.valueOf(arg.delete(REGION_MEMSTORE_REPLICATION)))
 if arg[REGION_MEMSTORE_REPLICATION]
+
htd.setRegionReplication(JInteger.valueOf(arg.delete(REGION_REPLICATION))) if 
arg[REGION_REPLICATION]
 parse_htd_args(htd, arg)
 set_user_metadata(htd, arg.delete(METADATA)) if arg[METADATA]
 set_descriptor_config(htd, arg.delete(CONFIGURATION)) if 
arg[CONFIGURATION]
@@ -598,7 +602,7 @@ module Hbase
 htd.setOwnerString(arg.delete(OWNER)) if arg[OWNER]
 htd.setMaxFileSize(JLong.valueOf(arg.delete(MAX_FILESIZE))) if 
arg[MAX_FILESIZE]
 htd.setReadOnly(JBoolean.valueOf(arg.delete(READONLY))) if 
arg[READONLY]
-htd.setCompactionEnabled(JBoolean.valueOf(arg[COMPACTION_ENABLED])) if 
arg[COMPACTION_ENABLED]
+
htd.setCompactionEnabled(JBoolean.valueOf(arg.delete(COMPACTION_ENABLED))) if 
arg[COMPACTION_ENABLED]
 parse_htd_args(htd, arg)
 
htd.setMemStoreFlushSize(JLong.valueOf(arg.delete(MEMSTORE_FLUSHSIZE))) if 
arg[MEMSTORE_FLUSHSIZE]
 # DEFERRED_LOG_FLUSH is deprecated and was replaced by DURABILITY.  To 
keep backward compatible, it still exists.
@@ -611,6 +615,9 @@ module Hbase
   end
 end
 
htd.setDurability(org.apache.hadoop.hbase.client.Durability.valueOf(arg.delete(DURABILITY)))
 if arg[DURABILITY]
+htd.setFlushPolicyClassName(arg.delete(FLUSH_POLICY)) if 
arg[FLUSH_POLICY]
+htd.setRegionSplitPolicyClassName(arg.delete(SPLIT_POLICY)) if 
arg[SPLIT_POLICY]
+
htd.setRegionMemstoreReplication(JBoolean.valueOf(arg.delete(REGION_MEMSTORE_REPLICATION)))
 if arg[REGION_MEMSTORE_REPLICATION]
 
htd.setRegionReplication(JInteger.valueOf(arg.delete(REGION_REPLICATION))) if 
arg[REGION_REPLICATION]
 set_user_metadata(htd, arg.delete(METADATA)) if arg[METADATA]
 set_descriptor_config(htd, arg.delete(CONFIGURATION)) if 
arg[CONFIGURATION]
@@ -789,6 +796,10 @@ module Hbase
   
family.setBlockCacheEnabled(JBoolean.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::BLOCKCACHE)))
 if arg.include?(org.apache.hadoop.hbase.HColumnDescriptor::BLOCKCACHE)
   
family.setScope(JInteger.valueOf(arg.delete(org.apache.hadoop.hbase.HColumnDescriptor::REPLICATION_SCOPE)))
 if 

hbase git commit: HBASE-19430 Remove the SettableTimestamp and SettableSequenceId

2017-12-06 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 97d17ae19 -> d2b1578b7


HBASE-19430 Remove the SettableTimestamp and SettableSequenceId


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d2b1578b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d2b1578b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d2b1578b

Branch: refs/heads/master
Commit: d2b1578b735c1d3c42a9cfe9b9be36d9d6fbd7a5
Parents: 97d17ae
Author: Chia-Ping Tsai 
Authored: Thu Dec 7 09:29:09 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Dec 7 13:28:02 2017 +0800

--
 .../apache/hadoop/hbase/ByteBufferKeyValue.java |   4 +-
 .../java/org/apache/hadoop/hbase/CellUtil.java  |  28 +--
 .../org/apache/hadoop/hbase/ExtendedCell.java   |  65 +-
 .../hadoop/hbase/IndividualBytesFieldCell.java  |  60 +
 .../java/org/apache/hadoop/hbase/KeyValue.java  |   4 +-
 .../apache/hadoop/hbase/PrivateCellUtil.java| 223 ---
 .../apache/hadoop/hbase/SettableSequenceId.java |  38 
 .../apache/hadoop/hbase/SettableTimestamp.java  |  45 
 .../io/encoding/BufferedDataBlockEncoder.java   |  11 +-
 .../hbase/TestIndividualBytesFieldCell.java |  21 +-
 .../hbase/mapreduce/HFileOutputFormat2.java |   1 -
 .../apache/hadoop/hbase/util/MapReduceCell.java |   4 +-
 .../hadoop/hbase/io/hfile/BlockCache.java   |   2 +-
 .../hadoop/hbase/regionserver/HRegion.java  |   6 +-
 .../visibility/VisibilityController.java|   2 +-
 15 files changed, 284 insertions(+), 230 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d2b1578b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
index c59b947..cd66312 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
@@ -296,8 +296,8 @@ public class ByteBufferKeyValue extends ByteBufferCell 
implements ExtendedCell {
   }
 
   @Override
-  public void setTimestamp(byte[] ts, int tsOffset) throws IOException {
-ByteBufferUtils.copyFromArrayToBuffer(this.buf, this.getTimestampOffset(), 
ts, tsOffset,
+  public void setTimestamp(byte[] ts) throws IOException {
+ByteBufferUtils.copyFromArrayToBuffer(this.buf, this.getTimestampOffset(), 
ts, 0,
 Bytes.SIZEOF_LONG);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/d2b1578b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index 9fabfdd..0940e63 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -34,16 +34,16 @@ import java.util.List;
 import java.util.Map.Entry;
 import java.util.NavigableMap;
 import java.util.Optional;
-
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.io.HeapSize;
-import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceAudience.Private;
 
+import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+
 /**
  * Utility methods helpful for slinging {@link Cell} instances. Some methods 
below are for internal
  * use only and are marked InterfaceAudience.Private at the method level. Note 
that all such methods
@@ -564,7 +564,7 @@ public final class CellUtil {
* @return A new cell which is having the extra tags also added to it.
* @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
* Use CP environment to build Cell using {@link 
ExtendedCellBuilder}
-   *
+   *
*/
   @Deprecated
   public static Cell createCell(Cell cell, List tags) {
@@ -935,7 +935,7 @@ public final class CellUtil {
   }
 
   /**
-   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. 
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
*/
   @Deprecated
   public static boolean isDeleteFamily(final Cell cell) {
@@ -943,7 +943,7 @@ public final class CellUtil {
   }
 
   /**
-   * @deprecated As of release 2.0.0, this will be 

hbase git commit: HBASE-19430 Remove the SettableTimestamp and SettableSequenceId

2017-12-06 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 e7a2e4352 -> 4833e63d1


HBASE-19430 Remove the SettableTimestamp and SettableSequenceId


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4833e63d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4833e63d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4833e63d

Branch: refs/heads/branch-2
Commit: 4833e63d177f021443c074eee4512014014c2271
Parents: e7a2e43
Author: Chia-Ping Tsai 
Authored: Thu Dec 7 09:36:08 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Dec 7 13:34:38 2017 +0800

--
 .../apache/hadoop/hbase/ByteBufferKeyValue.java |   4 +-
 .../java/org/apache/hadoop/hbase/CellUtil.java  |  51 ++---
 .../org/apache/hadoop/hbase/ExtendedCell.java   |  65 +-
 .../hadoop/hbase/IndividualBytesFieldCell.java  |  60 +
 .../java/org/apache/hadoop/hbase/KeyValue.java  |   4 +-
 .../apache/hadoop/hbase/PrivateCellUtil.java| 223 ---
 .../apache/hadoop/hbase/SettableSequenceId.java |  38 
 .../apache/hadoop/hbase/SettableTimestamp.java  |  45 
 .../io/encoding/BufferedDataBlockEncoder.java   |  11 +-
 .../hbase/TestIndividualBytesFieldCell.java |  21 +-
 .../hbase/mapreduce/HFileOutputFormat2.java |   1 -
 .../apache/hadoop/hbase/util/MapReduceCell.java |   4 +-
 .../hadoop/hbase/io/hfile/BlockCache.java   |   2 +-
 .../hadoop/hbase/regionserver/HRegion.java  |   6 +-
 .../visibility/VisibilityController.java|   2 +-
 15 files changed, 284 insertions(+), 253 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4833e63d/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
index c59b947..cd66312 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/ByteBufferKeyValue.java
@@ -296,8 +296,8 @@ public class ByteBufferKeyValue extends ByteBufferCell 
implements ExtendedCell {
   }
 
   @Override
-  public void setTimestamp(byte[] ts, int tsOffset) throws IOException {
-ByteBufferUtils.copyFromArrayToBuffer(this.buf, this.getTimestampOffset(), 
ts, tsOffset,
+  public void setTimestamp(byte[] ts) throws IOException {
+ByteBufferUtils.copyFromArrayToBuffer(this.buf, this.getTimestampOffset(), 
ts, 0,
 Bytes.SIZEOF_LONG);
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/4833e63d/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
index a749057..75225b4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/CellUtil.java
@@ -34,16 +34,16 @@ import java.util.List;
 import java.util.Map.Entry;
 import java.util.NavigableMap;
 import java.util.Optional;
-
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.io.HeapSize;
-import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceAudience.Private;
 
+import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+
 /**
  * Utility methods helpful for slinging {@link Cell} instances. Some methods 
below are for internal
  * use only and are marked InterfaceAudience.Private at the method level. Note 
that all such methods
@@ -505,7 +505,7 @@ public final class CellUtil {
   }
 
   /**
-   * Marked as audience Private as of 1.2.0. 
+   * Marked as audience Private as of 1.2.0.
* Creating a Cell with tags and a memstoreTS/mvcc is an
* internal implementation detail not for public use.
* @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. Use
@@ -529,7 +529,7 @@ public final class CellUtil {
   }
 
   /**
-   * Marked as audience Private as of 1.2.0. 
+   * Marked as audience Private as of 1.2.0.
* Creating a Cell with tags is an internal implementation detail not for 
public use.
* @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0. Use
* {@link ExtendedCellBuilder} instead
@@ -1169,34 +1169,24 @@ public final class CellUtil {

hbase git commit: HBASE-19439 Mark ShortCircuitMasterConnection with InterfaceAudience Private.

2017-12-06 Thread anoopsamjohn
Repository: hbase
Updated Branches:
  refs/heads/branch-2 4833e63d1 -> e88b3c65b


HBASE-19439 Mark ShortCircuitMasterConnection  with InterfaceAudience Private.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e88b3c65
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e88b3c65
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e88b3c65

Branch: refs/heads/branch-2
Commit: e88b3c65b9a86de97773cf96490bbd212b8925d3
Parents: 4833e63
Author: anoopsamjohn 
Authored: Thu Dec 7 11:22:40 2017 +0530
Committer: anoopsamjohn 
Committed: Thu Dec 7 11:24:00 2017 +0530

--
 .../main/java/org/apache/hadoop/hbase/client/MasterCallable.java   | 2 ++
 .../org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java  | 2 ++
 .../apache/hadoop/hbase/client/ShortCircuitMasterConnection.java   | 2 +-
 3 files changed, 5 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e88b3c65/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java
index 950b1d3..f56ebd5 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A RetryingCallable for Master RPC operations.
@@ -41,6 +42,7 @@ import org.apache.hadoop.hbase.util.Bytes;
  * it has to deal with Coprocessor Endpoints.
  * @param  return type
  */
+@InterfaceAudience.Private
 abstract class MasterCallable implements RetryingCallable, Closeable {
   protected final ClusterConnection connection;
   protected MasterKeepAliveConnection master;

http://git-wip-us.apache.org/repos/asf/hbase/blob/e88b3c65/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
index 1dc3a47..b1c3777 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
@@ -21,6 +21,7 @@
 package org.apache.hadoop.hbase.client;
 
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A KeepAlive connection is not physically closed immediately after the close,
@@ -33,6 +34,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
  * against the master on the MasterProtos.MasterService.BlockingInterface; but 
not by
  * final user code. Hence it's package protected.
  */
+@InterfaceAudience.Private
 interface MasterKeepAliveConnection extends 
MasterProtos.MasterService.BlockingInterface {
   // Do this instead of implement Closeable because closeable returning IOE is 
PITA.
   void close();

http://git-wip-us.apache.org/repos/asf/hbase/blob/e88b3c65/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
index 527f722..d9277c6 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
@@ -173,7 +173,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.Updat
  * A short-circuit connection that can bypass the RPC layer (serialization, 
deserialization,
  * networking, etc..) when talking to a local master
  */
-@InterfaceAudience.Public
+@InterfaceAudience.Private
 public class ShortCircuitMasterConnection implements MasterKeepAliveConnection 
{
 
   private final MasterService.BlockingInterface stub;



hbase git commit: HBASE-19439 Mark ShortCircuitMasterConnection with InterfaceAudience Private.

2017-12-06 Thread anoopsamjohn
Repository: hbase
Updated Branches:
  refs/heads/master d2b1578b7 -> 98a21ef1b


HBASE-19439 Mark ShortCircuitMasterConnection  with InterfaceAudience Private.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/98a21ef1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/98a21ef1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/98a21ef1

Branch: refs/heads/master
Commit: 98a21ef1bae01e209cd1816e37f3afd89a6e473b
Parents: d2b1578
Author: anoopsamjohn 
Authored: Thu Dec 7 11:22:40 2017 +0530
Committer: anoopsamjohn 
Committed: Thu Dec 7 11:22:40 2017 +0530

--
 .../main/java/org/apache/hadoop/hbase/client/MasterCallable.java   | 2 ++
 .../org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java  | 2 ++
 .../apache/hadoop/hbase/client/ShortCircuitMasterConnection.java   | 2 +-
 3 files changed, 5 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/98a21ef1/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java
index 950b1d3..f56ebd5 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A RetryingCallable for Master RPC operations.
@@ -41,6 +42,7 @@ import org.apache.hadoop.hbase.util.Bytes;
  * it has to deal with Coprocessor Endpoints.
  * @param  return type
  */
+@InterfaceAudience.Private
 abstract class MasterCallable implements RetryingCallable, Closeable {
   protected final ClusterConnection connection;
   protected MasterKeepAliveConnection master;

http://git-wip-us.apache.org/repos/asf/hbase/blob/98a21ef1/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
index 1dc3a47..b1c3777 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
@@ -21,6 +21,7 @@
 package org.apache.hadoop.hbase.client;
 
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A KeepAlive connection is not physically closed immediately after the close,
@@ -33,6 +34,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
  * against the master on the MasterProtos.MasterService.BlockingInterface; but 
not by
  * final user code. Hence it's package protected.
  */
+@InterfaceAudience.Private
 interface MasterKeepAliveConnection extends 
MasterProtos.MasterService.BlockingInterface {
   // Do this instead of implement Closeable because closeable returning IOE is 
PITA.
   void close();

http://git-wip-us.apache.org/repos/asf/hbase/blob/98a21ef1/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
index 527f722..d9277c6 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
@@ -173,7 +173,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos.Updat
  * A short-circuit connection that can bypass the RPC layer (serialization, 
deserialization,
  * networking, etc..) when talking to a local master
  */
-@InterfaceAudience.Public
+@InterfaceAudience.Private
 public class ShortCircuitMasterConnection implements MasterKeepAliveConnection 
{
 
   private final MasterService.BlockingInterface stub;



hbase git commit: HBASE-15628 Implement an AsyncOutputStream which can work with any FileSystem implementation; ADDENDUM -- Change Log output

2017-12-06 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master eabad8a91 -> 00750fe79


HBASE-15628 Implement an AsyncOutputStream which can work with any FileSystem 
implementation; ADDENDUM -- Change Log output


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/00750fe7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/00750fe7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/00750fe7

Branch: refs/heads/master
Commit: 00750fe79acbb6a43daa62b9fcabbd1f5ce4cf6c
Parents: eabad8a
Author: Michael Stack 
Authored: Wed Dec 6 11:35:54 2017 -0800
Committer: Michael Stack 
Committed: Wed Dec 6 11:35:54 2017 -0800

--
 .../hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java   | 6 --
 1 file changed, 4 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/00750fe7/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
index 61aa97c..08e1aae 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
@@ -409,11 +409,13 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
 
   private static PBHelper createPBHelper() throws NoSuchMethodException {
 Class helperClass;
+String clazzName = "org.apache.hadoop.hdfs.protocolPB.PBHelperClient";
 try {
-  helperClass = 
Class.forName("org.apache.hadoop.hdfs.protocolPB.PBHelperClient");
+  helperClass = Class.forName(clazzName);
 } catch (ClassNotFoundException e) {
-  LOG.debug("No PBHelperClient class found, should be hadoop 2.7-", e);
   helperClass = org.apache.hadoop.hdfs.protocolPB.PBHelper.class;
+  LOG.debug(""  + clazzName + " not found (Hadoop is pre-2.8.0?); using " +
+  helperClass.toString() + " instead.");
 }
 Method convertEBMethod = helperClass.getMethod("convert", 
ExtendedBlock.class);
 Method convertTokenMethod = helperClass.getMethod("convert", Token.class);



hbase git commit: HBASE-19422 Provide clear error message on use of wrong hadoop-profile property

2017-12-06 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 9ecd8589c -> 319088340


HBASE-19422 Provide clear error message on use of wrong hadoop-profile property

Signed-off-by: Apekshit Sharma 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/31908834
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/31908834
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/31908834

Branch: refs/heads/branch-1.4
Commit: 31908834088f93ed232478f36af3a803d76b1611
Parents: 9ecd858
Author: Mike Drob 
Authored: Mon Dec 4 16:54:01 2017 -0600
Committer: Andrew Purtell 
Committed: Wed Dec 6 11:09:29 2017 -0800

--
 pom.xml | 7 ++-
 1 file changed, 6 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/31908834/pom.xml
--
diff --git a/pom.xml b/pom.xml
index fa0dc42..545abdf 100644
--- a/pom.xml
+++ b/pom.xml
@@ -846,12 +846,17 @@
 
 
   
-min-maven-min-java-banned-xerces
+hadoop-profile-min-maven-min-java-banned-xerces
 
   enforce
 
 
   
+
+
+  System.getProperty("hadoop-profile", 
"").isEmpty()
+  The hadoop-profile property is unused, did you mean 
to set hadoop.profile instead?
+
 
 
   [${maven.min.version},)



hbase git commit: HBASE-15628 Implement an AsyncOutputStream which can work with any FileSystem implementation; ADDENDUM -- Change Log output

2017-12-06 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 b2f9b7bc1 -> 52ffa68f8


HBASE-15628 Implement an AsyncOutputStream which can work with any FileSystem 
implementation; ADDENDUM -- Change Log output


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/52ffa68f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/52ffa68f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/52ffa68f

Branch: refs/heads/branch-2
Commit: 52ffa68f8bf94131af18fff79a8b44618ad3515c
Parents: b2f9b7b
Author: Michael Stack 
Authored: Wed Dec 6 11:35:54 2017 -0800
Committer: Michael Stack 
Committed: Wed Dec 6 11:36:20 2017 -0800

--
 .../hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java   | 6 --
 1 file changed, 4 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/52ffa68f/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
index 61aa97c..08e1aae 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputHelper.java
@@ -409,11 +409,13 @@ public final class FanOutOneBlockAsyncDFSOutputHelper {
 
   private static PBHelper createPBHelper() throws NoSuchMethodException {
 Class helperClass;
+String clazzName = "org.apache.hadoop.hdfs.protocolPB.PBHelperClient";
 try {
-  helperClass = 
Class.forName("org.apache.hadoop.hdfs.protocolPB.PBHelperClient");
+  helperClass = Class.forName(clazzName);
 } catch (ClassNotFoundException e) {
-  LOG.debug("No PBHelperClient class found, should be hadoop 2.7-", e);
   helperClass = org.apache.hadoop.hdfs.protocolPB.PBHelper.class;
+  LOG.debug(""  + clazzName + " not found (Hadoop is pre-2.8.0?); using " +
+  helperClass.toString() + " instead.");
 }
 Method convertEBMethod = helperClass.getMethod("convert", 
ExtendedBlock.class);
 Method convertTokenMethod = helperClass.getMethod("convert", Token.class);



[1/2] hbase git commit: HBASE-19410 Move zookeeper related UTs to hbase-zookeeper and mark them as ZKTests

2017-12-06 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 6da52052e -> 75cdbb570


http://git-wip-us.apache.org/repos/asf/hbase/blob/75cdbb57/hbase-zookeeper/pom.xml
--
diff --git a/hbase-zookeeper/pom.xml b/hbase-zookeeper/pom.xml
index 06b7dff..7b5fa42 100644
--- a/hbase-zookeeper/pom.xml
+++ b/hbase-zookeeper/pom.xml
@@ -62,20 +62,6 @@
   true
 
   
-  
-  
-org.apache.maven.plugins
-maven-source-plugin
-
-  
-package
-
-  jar
-  test-jar
-
-  
-
-  
   
   
 org.apache.maven.plugins

http://git-wip-us.apache.org/repos/asf/hbase/blob/75cdbb57/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java
--
diff --git 
a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java
 
b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java
new file mode 100644
index 000..fc31c37
--- /dev/null
+++ 
b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java
@@ -0,0 +1,216 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.UUID;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
+import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Helpers for testing HBase that do not depend on specific server/etc. 
things. The main difference
+ * from {@link HBaseCommonTestingUtility} is that we can start a zookeeper 
cluster.
+ */
+@InterfaceAudience.Public
+public class HBaseZKTestingUtility extends HBaseCommonTestingUtility {
+
+  private MiniZooKeeperCluster zkCluster;
+
+  /**
+   * Set if we were passed a zkCluster. If so, we won't shutdown zk as part of 
general shutdown.
+   */
+  private boolean passedZkCluster;
+
+  protected ZKWatcher zooKeeperWatcher;
+
+  /** Directory (a subdirectory of dataTestDir) used by the dfs cluster if any 
*/
+  protected File clusterTestDir;
+
+  public HBaseZKTestingUtility() {
+this(HBaseConfiguration.create());
+  }
+
+  public HBaseZKTestingUtility(Configuration conf) {
+super(conf);
+  }
+
+  /**
+   * @return Where the cluster will write data on the local subsystem. Creates 
it if it does not
+   * exist already. A subdir of {@link #getBaseTestDir()}
+   * @see #getTestFileSystem()
+   */
+  Path getClusterTestDir() {
+if (clusterTestDir == null) {
+  setupClusterTestDir();
+}
+return new Path(clusterTestDir.getAbsolutePath());
+  }
+
+  /**
+   * Creates a directory for the cluster, under the test data
+   */
+  protected void setupClusterTestDir() {
+if (clusterTestDir != null) {
+  return;
+}
+
+// Using randomUUID ensures that multiple clusters can be launched by
+// a same test, if it stops & starts them
+Path testDir = getDataTestDir("cluster_" + UUID.randomUUID().toString());
+clusterTestDir = new File(testDir.toString()).getAbsoluteFile();
+// Have it cleaned up on exit
+boolean b = deleteOnExit();
+if (b) {
+  clusterTestDir.deleteOnExit();
+}
+LOG.info("Created new mini-cluster data directory: " + clusterTestDir + ", 
deleteOnExit=" + b);
+  }
+
+  /**
+   * Call this if you only want a zk cluster.
+   * @see #shutdownMiniZKCluster()
+   * @return zk cluster started.
+   */
+  public MiniZooKeeperCluster startMiniZKCluster() throws Exception {
+return startMiniZKCluster(1);
+  }
+
+  /**
+   * Call this if you only want a zk cluster.
+   * @see #shutdownMiniZKCluster()
+   * @return zk cluster started.
+   */
+  public MiniZooKeeperCluster startMiniZKCluster(int zooKeeperServerNum, 
int... clientPortList)
+  throws Exception {
+setupClusterTestDir();
+return startMiniZKCluster(clusterTestDir, zooKeeperServerNum, 
clientPortList);
+  }
+
+  /**
+   * Start a mini ZK cluster. 

[1/2] hbase git commit: HBASE-19410 Move zookeeper related UTs to hbase-zookeeper and mark them as ZKTests

2017-12-06 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2 142e6bb9d -> 03cb58158


http://git-wip-us.apache.org/repos/asf/hbase/blob/03cb5815/hbase-zookeeper/pom.xml
--
diff --git a/hbase-zookeeper/pom.xml b/hbase-zookeeper/pom.xml
index 7b2f8d6..ad2e481 100644
--- a/hbase-zookeeper/pom.xml
+++ b/hbase-zookeeper/pom.xml
@@ -62,20 +62,6 @@
   true
 
   
-  
-  
-org.apache.maven.plugins
-maven-source-plugin
-
-  
-package
-
-  jar
-  test-jar
-
-  
-
-  
   
   
 org.apache.maven.plugins

http://git-wip-us.apache.org/repos/asf/hbase/blob/03cb5815/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java
--
diff --git 
a/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java
 
b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java
new file mode 100644
index 000..fc31c37
--- /dev/null
+++ 
b/hbase-zookeeper/src/test/java/org/apache/hadoop/hbase/HBaseZKTestingUtility.java
@@ -0,0 +1,216 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.UUID;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
+import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * Helpers for testing HBase that do not depend on specific server/etc. 
things. The main difference
+ * from {@link HBaseCommonTestingUtility} is that we can start a zookeeper 
cluster.
+ */
+@InterfaceAudience.Public
+public class HBaseZKTestingUtility extends HBaseCommonTestingUtility {
+
+  private MiniZooKeeperCluster zkCluster;
+
+  /**
+   * Set if we were passed a zkCluster. If so, we won't shutdown zk as part of 
general shutdown.
+   */
+  private boolean passedZkCluster;
+
+  protected ZKWatcher zooKeeperWatcher;
+
+  /** Directory (a subdirectory of dataTestDir) used by the dfs cluster if any 
*/
+  protected File clusterTestDir;
+
+  public HBaseZKTestingUtility() {
+this(HBaseConfiguration.create());
+  }
+
+  public HBaseZKTestingUtility(Configuration conf) {
+super(conf);
+  }
+
+  /**
+   * @return Where the cluster will write data on the local subsystem. Creates 
it if it does not
+   * exist already. A subdir of {@link #getBaseTestDir()}
+   * @see #getTestFileSystem()
+   */
+  Path getClusterTestDir() {
+if (clusterTestDir == null) {
+  setupClusterTestDir();
+}
+return new Path(clusterTestDir.getAbsolutePath());
+  }
+
+  /**
+   * Creates a directory for the cluster, under the test data
+   */
+  protected void setupClusterTestDir() {
+if (clusterTestDir != null) {
+  return;
+}
+
+// Using randomUUID ensures that multiple clusters can be launched by
+// a same test, if it stops & starts them
+Path testDir = getDataTestDir("cluster_" + UUID.randomUUID().toString());
+clusterTestDir = new File(testDir.toString()).getAbsoluteFile();
+// Have it cleaned up on exit
+boolean b = deleteOnExit();
+if (b) {
+  clusterTestDir.deleteOnExit();
+}
+LOG.info("Created new mini-cluster data directory: " + clusterTestDir + ", 
deleteOnExit=" + b);
+  }
+
+  /**
+   * Call this if you only want a zk cluster.
+   * @see #shutdownMiniZKCluster()
+   * @return zk cluster started.
+   */
+  public MiniZooKeeperCluster startMiniZKCluster() throws Exception {
+return startMiniZKCluster(1);
+  }
+
+  /**
+   * Call this if you only want a zk cluster.
+   * @see #shutdownMiniZKCluster()
+   * @return zk cluster started.
+   */
+  public MiniZooKeeperCluster startMiniZKCluster(int zooKeeperServerNum, 
int... clientPortList)
+  throws Exception {
+setupClusterTestDir();
+return startMiniZKCluster(clusterTestDir, zooKeeperServerNum, 
clientPortList);
+  }
+
+  /**
+   * Start a mini ZK 

[2/2] hbase git commit: HBASE-19410 Move zookeeper related UTs to hbase-zookeeper and mark them as ZKTests

2017-12-06 Thread zhangduo
HBASE-19410 Move zookeeper related UTs to hbase-zookeeper and mark them as 
ZKTests


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/75cdbb57
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/75cdbb57
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/75cdbb57

Branch: refs/heads/master
Commit: 75cdbb57001ac6195b10381c7032ba04d55b4577
Parents: 6da5205
Author: zhangduo 
Authored: Wed Dec 6 16:38:34 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 6 16:38:34 2017 +0800

--
 .../hbase/testclassification/ZKTests.java   |   4 +
 .../hadoop/hbase/HBaseCommonTestingUtility.java |  27 +-
 .../java/org/apache/hadoop/hbase/Waiter.java|   4 +-
 hbase-endpoint/pom.xml  |  13 +-
 hbase-mapreduce/pom.xml |   8 +
 hbase-server/pom.xml|  20 +-
 .../hadoop/hbase/HBaseTestingUtility.java   | 278 ++---
 .../hbase/client/TestZKAsyncRegistry.java   |   4 +-
 .../hadoop/hbase/zookeeper/TestHQuorumPeer.java | 106 -
 .../zookeeper/TestRecoverableZooKeeper.java | 122 --
 .../hbase/zookeeper/TestZKLeaderManager.java| 236 ---
 .../hbase/zookeeper/TestZKMainServer.java   | 118 --
 .../hadoop/hbase/zookeeper/TestZKMulti.java | 392 ---
 .../hbase/zookeeper/TestZKNodeTracker.java  | 351 -
 .../hbase/zookeeper/TestZooKeeperACL.java   |  19 +-
 hbase-spark/pom.xml |  12 +-
 hbase-testing-util/pom.xml  |  12 +
 hbase-zookeeper/pom.xml |  14 -
 .../hadoop/hbase/HBaseZKTestingUtility.java | 216 ++
 .../hadoop/hbase/zookeeper/TestHQuorumPeer.java | 104 +
 .../hbase/zookeeper/TestReadOnlyZKClient.java   |  39 +-
 .../zookeeper/TestRecoverableZooKeeper.java | 123 ++
 .../hbase/zookeeper/TestZKLeaderManager.java| 239 +++
 .../hbase/zookeeper/TestZKMainServer.java   | 119 ++
 .../hadoop/hbase/zookeeper/TestZKMulti.java | 390 ++
 .../hbase/zookeeper/TestZKNodeTracker.java  | 341 
 pom.xml |   8 +
 27 files changed, 1682 insertions(+), 1637 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/75cdbb57/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/ZKTests.java
--
diff --git 
a/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/ZKTests.java
 
b/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/ZKTests.java
index ad869fa..0951c10 100644
--- 
a/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/ZKTests.java
+++ 
b/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/ZKTests.java
@@ -17,5 +17,9 @@
  */
 package org.apache.hadoop.hbase.testclassification;
 
+/**
+ * For tests which test the general logic of zookeeper related tools, such as
+ * {@code RecoverableZooKeeper}, not for tests which depend on zookeeper.
+ */
 public interface ZKTests {
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/75cdbb57/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index d153637..a503820 100644
--- 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -29,8 +29,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.Waiter.Predicate;
 import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Common helpers for testing HBase that do not depend on specific server/etc. 
things.
@@ -223,4 +224,28 @@ public class HBaseCommonTestingUtility {
 } while (ntries < 30);
 return ntries < 30;
   }
+
+  /**
+   * Wrapper method for {@link Waiter#waitFor(Configuration, long, Predicate)}.
+   */
+  public  long waitFor(long timeout, Predicate 
predicate)
+  throws E {
+return Waiter.waitFor(this.conf, timeout, predicate);
+  }
+
+  /**
+   * Wrapper method for {@link Waiter#waitFor(Configuration, long, long, 
Predicate)}.
+   */
+  public  long waitFor(long timeout, long 

[2/2] hbase git commit: HBASE-19410 Move zookeeper related UTs to hbase-zookeeper and mark them as ZKTests

2017-12-06 Thread zhangduo
HBASE-19410 Move zookeeper related UTs to hbase-zookeeper and mark them as 
ZKTests


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/03cb5815
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/03cb5815
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/03cb5815

Branch: refs/heads/branch-2
Commit: 03cb581585f40fb9d42c85db6ccdafb2b702ab9f
Parents: 142e6bb
Author: zhangduo 
Authored: Wed Dec 6 16:38:34 2017 +0800
Committer: zhangduo 
Committed: Wed Dec 6 16:38:58 2017 +0800

--
 .../hbase/testclassification/ZKTests.java   |   4 +
 .../hadoop/hbase/HBaseCommonTestingUtility.java |  27 +-
 .../java/org/apache/hadoop/hbase/Waiter.java|   4 +-
 hbase-endpoint/pom.xml  |  13 +-
 hbase-mapreduce/pom.xml |   8 +
 hbase-server/pom.xml|  20 +-
 .../hadoop/hbase/HBaseTestingUtility.java   | 278 ++---
 .../hbase/client/TestZKAsyncRegistry.java   |   4 +-
 .../hadoop/hbase/zookeeper/TestHQuorumPeer.java | 106 -
 .../zookeeper/TestRecoverableZooKeeper.java | 122 --
 .../hbase/zookeeper/TestZKLeaderManager.java| 236 ---
 .../hbase/zookeeper/TestZKMainServer.java   | 118 --
 .../hadoop/hbase/zookeeper/TestZKMulti.java | 392 ---
 .../hbase/zookeeper/TestZKNodeTracker.java  | 351 -
 .../hbase/zookeeper/TestZooKeeperACL.java   |  19 +-
 hbase-spark/pom.xml |  12 +-
 hbase-testing-util/pom.xml  |  12 +
 hbase-zookeeper/pom.xml |  14 -
 .../hadoop/hbase/HBaseZKTestingUtility.java | 216 ++
 .../hadoop/hbase/zookeeper/TestHQuorumPeer.java | 104 +
 .../hbase/zookeeper/TestReadOnlyZKClient.java   |  39 +-
 .../zookeeper/TestRecoverableZooKeeper.java | 123 ++
 .../hbase/zookeeper/TestZKLeaderManager.java| 239 +++
 .../hbase/zookeeper/TestZKMainServer.java   | 119 ++
 .../hadoop/hbase/zookeeper/TestZKMulti.java | 390 ++
 .../hbase/zookeeper/TestZKNodeTracker.java  | 341 
 pom.xml |   8 +
 27 files changed, 1682 insertions(+), 1637 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/03cb5815/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/ZKTests.java
--
diff --git 
a/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/ZKTests.java
 
b/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/ZKTests.java
index ad869fa..0951c10 100644
--- 
a/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/ZKTests.java
+++ 
b/hbase-annotations/src/test/java/org/apache/hadoop/hbase/testclassification/ZKTests.java
@@ -17,5 +17,9 @@
  */
 package org.apache.hadoop.hbase.testclassification;
 
+/**
+ * For tests which test the general logic of zookeeper related tools, such as
+ * {@code RecoverableZooKeeper}, not for tests which depend on zookeeper.
+ */
 public interface ZKTests {
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/03cb5815/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
index d153637..a503820 100644
--- 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
+++ 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
@@ -29,8 +29,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.hadoop.hbase.Waiter.Predicate;
 import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Common helpers for testing HBase that do not depend on specific server/etc. 
things.
@@ -223,4 +224,28 @@ public class HBaseCommonTestingUtility {
 } while (ntries < 30);
 return ntries < 30;
   }
+
+  /**
+   * Wrapper method for {@link Waiter#waitFor(Configuration, long, Predicate)}.
+   */
+  public  long waitFor(long timeout, Predicate 
predicate)
+  throws E {
+return Waiter.waitFor(this.conf, timeout, predicate);
+  }
+
+  /**
+   * Wrapper method for {@link Waiter#waitFor(Configuration, long, long, 
Predicate)}.
+   */
+  public  long waitFor(long timeout, long 

hbase git commit: HBASE-19023 Replace hbase-server with hbase-mapreduce for HBase and MapReduce chapter

2017-12-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 27ed4d8ad -> eabad8a91


HBASE-19023 Replace hbase-server with hbase-mapreduce for HBase and MapReduce 
chapter

RowCounter and other related HBase's MapReduce classes have been moved
to hbase-mapreduce component by HBASE-18640, related chapter was
out-of-date and this fix replaced hbase-server with hbase-mapreduce
to correct those commands

Also this change moved RowCounter_Counters.properties to
hbase-mapreduce package as well

JIRA https://issues.apache.org/jira/browse/HBASE-19023

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eabad8a9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eabad8a9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eabad8a9

Branch: refs/heads/master
Commit: eabad8a91ccb73eba83b99f83a937e29a9a4c6fa
Parents: 27ed4d8
Author: TAK LON WU 
Authored: Fri Dec 1 15:25:59 2017 -0800
Committer: tedyu 
Committed: Wed Dec 6 09:01:19 2017 -0800

--
 .../hbase/mapred/RowCounter_Counters.properties | 21 
 .../mapreduce/RowCounter_Counters.properties| 21 
 .../hbase/mapred/RowCounter_Counters.properties | 21 
 .../mapreduce/RowCounter_Counters.properties| 21 
 src/main/asciidoc/_chapters/mapreduce.adoc  |  6 +++---
 src/main/asciidoc/_chapters/ops_mgt.adoc|  4 ++--
 .../asciidoc/_chapters/troubleshooting.adoc |  2 +-
 7 files changed, 48 insertions(+), 48 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eabad8a9/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapred/RowCounter_Counters.properties
--
diff --git 
a/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapred/RowCounter_Counters.properties
 
b/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapred/RowCounter_Counters.properties
new file mode 100644
index 000..661e56d
--- /dev/null
+++ 
b/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapred/RowCounter_Counters.properties
@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# ResourceBundle properties file for RowCounter MR job
+
+CounterGroupName= RowCounter
+
+ROWS.name=Rows

http://git-wip-us.apache.org/repos/asf/hbase/blob/eabad8a9/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapreduce/RowCounter_Counters.properties
--
diff --git 
a/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapreduce/RowCounter_Counters.properties
 
b/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapreduce/RowCounter_Counters.properties
new file mode 100644
index 000..661e56d
--- /dev/null
+++ 
b/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapreduce/RowCounter_Counters.properties
@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# ResourceBundle properties file for RowCounter MR job
+
+CounterGroupName= RowCounter
+
+ROWS.name=Rows


hbase git commit: HBASE-19023 Replace hbase-server with hbase-mapreduce for HBase and MapReduce chapter

2017-12-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-2 b9f1f5a17 -> b2f9b7bc1


HBASE-19023 Replace hbase-server with hbase-mapreduce for HBase and MapReduce 
chapter

RowCounter and other related HBase's MapReduce classes have been moved
to hbase-mapreduce component by HBASE-18640, related chapter was
out-of-date and this fix replaced hbase-server with hbase-mapreduce
to correct those commands

Also this change moved RowCounter_Counters.properties
to hbase-mapreduce package as well

JIRA https://issues.apache.org/jira/browse/HBASE-19023

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b2f9b7bc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b2f9b7bc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b2f9b7bc

Branch: refs/heads/branch-2
Commit: b2f9b7bc194a9ab93ae25f103220a92396bc43ad
Parents: b9f1f5a
Author: TAK LON WU 
Authored: Fri Dec 1 15:25:59 2017 -0800
Committer: tedyu 
Committed: Wed Dec 6 09:04:02 2017 -0800

--
 .../hbase/mapred/RowCounter_Counters.properties | 21 
 .../mapreduce/RowCounter_Counters.properties| 21 
 .../hbase/mapred/RowCounter_Counters.properties | 21 
 .../mapreduce/RowCounter_Counters.properties| 21 
 src/main/asciidoc/_chapters/mapreduce.adoc  |  8 
 src/main/asciidoc/_chapters/ops_mgt.adoc|  4 ++--
 .../asciidoc/_chapters/troubleshooting.adoc |  2 +-
 7 files changed, 49 insertions(+), 49 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b2f9b7bc/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapred/RowCounter_Counters.properties
--
diff --git 
a/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapred/RowCounter_Counters.properties
 
b/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapred/RowCounter_Counters.properties
new file mode 100644
index 000..661e56d
--- /dev/null
+++ 
b/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapred/RowCounter_Counters.properties
@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# ResourceBundle properties file for RowCounter MR job
+
+CounterGroupName= RowCounter
+
+ROWS.name=Rows

http://git-wip-us.apache.org/repos/asf/hbase/blob/b2f9b7bc/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapreduce/RowCounter_Counters.properties
--
diff --git 
a/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapreduce/RowCounter_Counters.properties
 
b/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapreduce/RowCounter_Counters.properties
new file mode 100644
index 000..661e56d
--- /dev/null
+++ 
b/hbase-mapreduce/src/main/resources/org/apache/hadoop/hbase/mapreduce/RowCounter_Counters.properties
@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# ResourceBundle properties file for RowCounter MR job
+
+CounterGroupName= RowCounter
+
+ROWS.name=Rows


[1/6] hbase git commit: HBASE-19442 Backport HBASE-19065 to branch-1 (HRegion#bulkLoadHFiles() should wait for concurrent Region#flush() to finish).

2017-12-06 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 6c2d51132 -> f88304d16
  refs/heads/branch-1.2 199c0e6a3 -> a1e77f605
  refs/heads/branch-1.3 0875e8611 -> 6d8c080b1
  refs/heads/branch-1.4 6fcbdc0cc -> fda742b35


HBASE-19442 Backport HBASE-19065 to branch-1 (HRegion#bulkLoadHFiles() should 
wait for concurrent Region#flush() to finish).

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6d8c080b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6d8c080b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6d8c080b

Branch: refs/heads/branch-1.3
Commit: 6d8c080b1c98ef672a7bc24a76e95584a3dedbd0
Parents: 0875e86
Author: Pankaj Kumar 
Authored: Thu Dec 7 01:59:20 2017 +0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 14:15:04 2017 -0800

--
 .../main/java/org/apache/hadoop/hbase/regionserver/HRegion.java  | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6d8c080b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 99cdf5f..9c75ef8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -5637,6 +5637,10 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   seqId = ((FlushResultImpl)fs).flushSequenceId;
 } else if (fs.getResult() == 
FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY) {
   seqId = ((FlushResultImpl)fs).flushSequenceId;
+} else if (fs.getResult() == FlushResult.Result.CANNOT_FLUSH) {
+  // CANNOT_FLUSH may mean that a flush is already on-going
+  // we need to wait for that flush to complete
+  waitForFlushes();
 } else {
   throw new IOException("Could not bulk load with an assigned 
sequential ID because the "+
 "flush didn't run. Reason for not flushing: " + 
((FlushResultImpl)fs).failureReason);



[5/6] hbase git commit: HBASE-19440 Not able to enable balancer with RSGroups once disabled

2017-12-06 Thread apurtell
HBASE-19440 Not able to enable balancer with RSGroups once disabled

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/48b41c4b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/48b41c4b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/48b41c4b

Branch: refs/heads/branch-1.4
Commit: 48b41c4b13fc4208d4f360e388a66dbe0ce533e3
Parents: 6fcbdc0
Author: Abhishek Singh Chouhan 
Authored: Wed Dec 6 19:43:30 2017 +0530
Committer: Andrew Purtell 
Committed: Wed Dec 6 14:18:57 2017 -0800

--
 .../hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java   |  2 +-
 .../org/apache/hadoop/hbase/rsgroup/TestRSGroups.java| 11 +++
 2 files changed, 12 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/48b41c4b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
index b844651..44bd946 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
@@ -684,7 +684,7 @@ public class RSGroupAdminEndpoint extends 
RSGroupAdminService
   @Override
   public boolean 
preBalanceSwitch(ObserverContext ctx,
   boolean newValue) throws IOException {
-return false;
+return newValue;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/48b41c4b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
--
diff --git 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
index d3c546e..081c0a3 100644
--- 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
+++ 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
@@ -54,6 +54,7 @@ import java.io.IOException;
 import java.util.Iterator;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -279,6 +280,16 @@ public class TestRSGroups extends TestRSGroupsBase {
   }
 
   @Test
+  public void testRSGroupBalancerSwitch() throws IOException {
+//Balancer is initially off in the test, set to true and check
+assertFalse(admin.setBalancerRunning(true, true));
+assertTrue(admin.isBalancerEnabled());
+//Set balancer off and check if it actually turned off
+assertTrue(admin.setBalancerRunning(false,true));
+assertFalse(admin.isBalancerEnabled());
+  }
+
+  @Test
   public void testCloneSnapshot() throws Exception {
 final TableName tableName = 
TableName.valueOf(tablePrefix+"_testCloneSnapshot");
 LOG.info("testCloneSnapshot");



[3/6] hbase git commit: HBASE-19442 Backport HBASE-19065 to branch-1 (HRegion#bulkLoadHFiles() should wait for concurrent Region#flush() to finish).

2017-12-06 Thread apurtell
HBASE-19442 Backport HBASE-19065 to branch-1 (HRegion#bulkLoadHFiles() should 
wait for concurrent Region#flush() to finish).

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f88304d1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f88304d1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f88304d1

Branch: refs/heads/branch-1
Commit: f88304d162529dd93206afcbaee9001019e07acc
Parents: 2cab98f
Author: Pankaj Kumar 
Authored: Thu Dec 7 01:59:20 2017 +0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 14:18:44 2017 -0800

--
 .../main/java/org/apache/hadoop/hbase/regionserver/HRegion.java  | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f88304d1/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 129e01d..92e8ef7 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -5783,6 +5783,10 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   seqId = ((FlushResultImpl)fs).flushSequenceId;
 } else if (fs.getResult() == 
FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY) {
   seqId = ((FlushResultImpl)fs).flushSequenceId;
+} else if (fs.getResult() == FlushResult.Result.CANNOT_FLUSH) {
+  // CANNOT_FLUSH may mean that a flush is already on-going
+  // we need to wait for that flush to complete
+  waitForFlushes();
 } else {
   throw new IOException("Could not bulk load with an assigned 
sequential ID because the "+
 "flush didn't run. Reason for not flushing: " + 
((FlushResultImpl)fs).failureReason);



[2/6] hbase git commit: HBASE-19442 Backport HBASE-19065 to branch-1 (HRegion#bulkLoadHFiles() should wait for concurrent Region#flush() to finish).

2017-12-06 Thread apurtell
HBASE-19442 Backport HBASE-19065 to branch-1 (HRegion#bulkLoadHFiles() should 
wait for concurrent Region#flush() to finish).

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a1e77f60
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a1e77f60
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a1e77f60

Branch: refs/heads/branch-1.2
Commit: a1e77f6056ea184050a22e92d82b171af03f9128
Parents: 199c0e6
Author: Pankaj Kumar 
Authored: Thu Dec 7 01:59:20 2017 +0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 14:15:08 2017 -0800

--
 .../main/java/org/apache/hadoop/hbase/regionserver/HRegion.java  | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a1e77f60/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 696f8c3..a19b3e8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -5469,6 +5469,10 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   seqId = ((FlushResultImpl)fs).flushSequenceId;
 } else if (fs.getResult() == 
FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY) {
   seqId = ((FlushResultImpl)fs).flushSequenceId;
+} else if (fs.getResult() == FlushResult.Result.CANNOT_FLUSH) {
+  // CANNOT_FLUSH may mean that a flush is already on-going
+  // we need to wait for that flush to complete
+  waitForFlushes();
 } else {
   throw new IOException("Could not bulk load with an assigned 
sequential ID because the "+
 "flush didn't run. Reason for not flushing: " + 
((FlushResultImpl)fs).failureReason);



[6/6] hbase git commit: HBASE-19442 Backport HBASE-19065 to branch-1 (HRegion#bulkLoadHFiles() should wait for concurrent Region#flush() to finish).

2017-12-06 Thread apurtell
HBASE-19442 Backport HBASE-19065 to branch-1 (HRegion#bulkLoadHFiles() should 
wait for concurrent Region#flush() to finish).

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fda742b3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fda742b3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fda742b3

Branch: refs/heads/branch-1.4
Commit: fda742b35b58cdac40e4b0c1891625346a2f9d47
Parents: 48b41c4
Author: Pankaj Kumar 
Authored: Thu Dec 7 01:59:20 2017 +0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 14:18:57 2017 -0800

--
 .../main/java/org/apache/hadoop/hbase/regionserver/HRegion.java  | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fda742b3/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 129e01d..92e8ef7 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -5783,6 +5783,10 @@ public class HRegion implements HeapSize, 
PropagatingConfigurationObserver, Regi
   seqId = ((FlushResultImpl)fs).flushSequenceId;
 } else if (fs.getResult() == 
FlushResult.Result.CANNOT_FLUSH_MEMSTORE_EMPTY) {
   seqId = ((FlushResultImpl)fs).flushSequenceId;
+} else if (fs.getResult() == FlushResult.Result.CANNOT_FLUSH) {
+  // CANNOT_FLUSH may mean that a flush is already on-going
+  // we need to wait for that flush to complete
+  waitForFlushes();
 } else {
   throw new IOException("Could not bulk load with an assigned 
sequential ID because the "+
 "flush didn't run. Reason for not flushing: " + 
((FlushResultImpl)fs).failureReason);



[4/6] hbase git commit: HBASE-19440 Not able to enable balancer with RSGroups once disabled

2017-12-06 Thread apurtell
HBASE-19440 Not able to enable balancer with RSGroups once disabled

Signed-off-by: Andrew Purtell 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2cab98f6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2cab98f6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2cab98f6

Branch: refs/heads/branch-1
Commit: 2cab98f6beef0efb8798f05e0edf7883aceb1ae6
Parents: 6c2d511
Author: Abhishek Singh Chouhan 
Authored: Wed Dec 6 19:43:30 2017 +0530
Committer: Andrew Purtell 
Committed: Wed Dec 6 14:18:44 2017 -0800

--
 .../hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java   |  2 +-
 .../org/apache/hadoop/hbase/rsgroup/TestRSGroups.java| 11 +++
 2 files changed, 12 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2cab98f6/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
--
diff --git 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
index b844651..44bd946 100644
--- 
a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
+++ 
b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupAdminEndpoint.java
@@ -684,7 +684,7 @@ public class RSGroupAdminEndpoint extends 
RSGroupAdminService
   @Override
   public boolean 
preBalanceSwitch(ObserverContext ctx,
   boolean newValue) throws IOException {
-return false;
+return newValue;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/2cab98f6/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
--
diff --git 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
index d3c546e..081c0a3 100644
--- 
a/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
+++ 
b/hbase-rsgroup/src/test/java/org/apache/hadoop/hbase/rsgroup/TestRSGroups.java
@@ -54,6 +54,7 @@ import java.io.IOException;
 import java.util.Iterator;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -279,6 +280,16 @@ public class TestRSGroups extends TestRSGroupsBase {
   }
 
   @Test
+  public void testRSGroupBalancerSwitch() throws IOException {
+//Balancer is initially off in the test, set to true and check
+assertFalse(admin.setBalancerRunning(true, true));
+assertTrue(admin.isBalancerEnabled());
+//Set balancer off and check if it actually turned off
+assertTrue(admin.setBalancerRunning(false,true));
+assertFalse(admin.isBalancerEnabled());
+  }
+
+  @Test
   public void testCloneSnapshot() throws Exception {
 final TableName tableName = 
TableName.valueOf(tablePrefix+"_testCloneSnapshot");
 LOG.info("testCloneSnapshot");



[hbase] Git Push Summary

2017-12-06 Thread apurtell
Repository: hbase
Updated Tags:  refs/tags/1.4.0RC0 7123ddfad -> 8c720fbd6


hbase git commit: Update CHANGES.txt for 1.4.0RC0

2017-12-06 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 fda742b35 -> 3bc2a6731


Update CHANGES.txt for 1.4.0RC0


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3bc2a673
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3bc2a673
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3bc2a673

Branch: refs/heads/branch-1.4
Commit: 3bc2a673150a69c7d2fa8f4dc48ab07d2dc704b5
Parents: fda742b
Author: Andrew Purtell 
Authored: Wed Dec 6 14:32:17 2017 -0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 14:32:17 2017 -0800

--
 CHANGES.txt | 4 
 1 file changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3bc2a673/CHANGES.txt
--
diff --git a/CHANGES.txt b/CHANGES.txt
index ac322ea..a833608 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -445,8 +445,12 @@ Release Notes - HBase - Version 1.4.0 12/18/2017
 * [HBASE-19395] - [branch-1] 
TestEndToEndSplitTransaction.testMasterOpsWhileSplitting fails with NPE
 * [HBASE-19396] - Fix flaky test TestHTableMultiplexerFlushCache
 * [HBASE-19406] - Fix CompactionRequest equals and hashCode
+* [HBASE-19422] - Provide clear error message on use of wrong 
hadoop-profile property
 * [HBASE-19423] - Replication entries are not filtered correctly when 
replication scope is set through WAL Co-processor
 * [HBASE-19429] - Release build fails in checkstyle phase of site target 
(branch-1)
+* [HBASE-19440] - Not able to enable balancer with RSGroups once disabled
+* [HBASE-19442] - Backport HBASE-19065 to branch-1 
(HRegion#bulkLoadHFiles() should wait for concurrent Region#flush() to finish).
+* [HBASE-19445] - PerformanceEvaluation NPE processing split policy option
 
 ** Improvement
 * [HBASE-11013] - Clone Snapshots on Secure Cluster Should provide option 
to apply Retained User Permissions



[2/2] hbase git commit: HBASE-19445 PerformanceEvaluation NPE processing split policy option

2017-12-06 Thread apurtell
HBASE-19445 PerformanceEvaluation NPE processing split policy option


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6c2d5113
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6c2d5113
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6c2d5113

Branch: refs/heads/branch-1
Commit: 6c2d51132ff2d16035cf5ea614d4887f4113f155
Parents: cf73199
Author: Andrew Purtell 
Authored: Wed Dec 6 14:08:55 2017 -0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 14:10:11 2017 -0800

--
 .../test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6c2d5113/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 7230b41..85d3613 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -384,7 +384,7 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 if (opts.replicas != DEFAULT_OPTS.replicas) {
   desc.setRegionReplication(opts.replicas);
 }
-if (!opts.splitPolicy.equals(DEFAULT_OPTS.splitPolicy)) {
+if (opts.splitPolicy != null && 
!opts.splitPolicy.equals(DEFAULT_OPTS.splitPolicy)) {
   desc.setRegionSplitPolicyClassName(opts.splitPolicy);
 }
 return desc;



[2/2] hbase git commit: HBASE-19445 PerformanceEvaluation NPE processing split policy option

2017-12-06 Thread apurtell
HBASE-19445 PerformanceEvaluation NPE processing split policy option


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3e7b90ac
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3e7b90ac
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3e7b90ac

Branch: refs/heads/master
Commit: 3e7b90ac6d808c171ea988d8d32ef998146713ac
Parents: 00750fe
Author: Andrew Purtell 
Authored: Wed Dec 6 14:08:55 2017 -0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 14:11:41 2017 -0800

--
 .../test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3e7b90ac/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index e2d23e5..8255573 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -406,7 +406,7 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 if (opts.replicas != DEFAULT_OPTS.replicas) {
   desc.setRegionReplication(opts.replicas);
 }
-if (opts.splitPolicy != DEFAULT_OPTS.splitPolicy) {
+if (opts.splitPolicy != null && 
!opts.splitPolicy.equals(DEFAULT_OPTS.splitPolicy)) {
   desc.setRegionSplitPolicyClassName(opts.splitPolicy);
 }
 return desc;



[1/2] hbase git commit: HBASE-19445 PerformanceEvaluation NPE processing split policy option

2017-12-06 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-2 52ffa68f8 -> 57df73ac6
  refs/heads/master 00750fe79 -> 3e7b90ac6


HBASE-19445 PerformanceEvaluation NPE processing split policy option


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/57df73ac
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/57df73ac
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/57df73ac

Branch: refs/heads/branch-2
Commit: 57df73ac6819e0e9fde89027a07fe09735eec675
Parents: 52ffa68
Author: Andrew Purtell 
Authored: Wed Dec 6 14:08:55 2017 -0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 14:11:40 2017 -0800

--
 .../test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/57df73ac/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index e2d23e5..8255573 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -406,7 +406,7 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 if (opts.replicas != DEFAULT_OPTS.replicas) {
   desc.setRegionReplication(opts.replicas);
 }
-if (opts.splitPolicy != DEFAULT_OPTS.splitPolicy) {
+if (opts.splitPolicy != null && 
!opts.splitPolicy.equals(DEFAULT_OPTS.splitPolicy)) {
   desc.setRegionSplitPolicyClassName(opts.splitPolicy);
 }
 return desc;



[1/2] hbase git commit: HBASE-19445 PerformanceEvaluation NPE processing split policy option

2017-12-06 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 cf73199d0 -> 6c2d51132
  refs/heads/branch-1.4 319088340 -> 6fcbdc0cc


HBASE-19445 PerformanceEvaluation NPE processing split policy option


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6fcbdc0c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6fcbdc0c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6fcbdc0c

Branch: refs/heads/branch-1.4
Commit: 6fcbdc0cc799ae8df965b4a0677e6fe2f1f7a54c
Parents: 3190883
Author: Andrew Purtell 
Authored: Wed Dec 6 14:08:55 2017 -0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 14:09:26 2017 -0800

--
 .../test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6fcbdc0c/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 7230b41..85d3613 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -384,7 +384,7 @@ public class PerformanceEvaluation extends Configured 
implements Tool {
 if (opts.replicas != DEFAULT_OPTS.replicas) {
   desc.setRegionReplication(opts.replicas);
 }
-if (!opts.splitPolicy.equals(DEFAULT_OPTS.splitPolicy)) {
+if (opts.splitPolicy != null && 
!opts.splitPolicy.equals(DEFAULT_OPTS.splitPolicy)) {
   desc.setRegionSplitPolicyClassName(opts.splitPolicy);
 }
 return desc;



hbase git commit: HBASE-19417 Remove boolean return value from postBulkLoadHFile hook

2017-12-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-2 57df73ac6 -> 497902731


HBASE-19417 Remove boolean return value from postBulkLoadHFile hook


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/49790273
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/49790273
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/49790273

Branch: refs/heads/branch-2
Commit: 497902731a5364b1f209b4f8ebba15604bcf1b3a
Parents: 57df73a
Author: tedyu 
Authored: Wed Dec 6 14:23:22 2017 -0800
Committer: tedyu 
Committed: Wed Dec 6 14:23:22 2017 -0800

--
 .../hbase/coprocessor/RegionObserver.java   | 11 +--
 .../hbase/regionserver/RSRpcServices.java   | 20 ++--
 .../regionserver/RegionCoprocessorHost.java | 16 +++-
 .../regionserver/SecureBulkLoadManager.java |  6 +-
 .../hbase/coprocessor/SimpleRegionObserver.java |  5 ++---
 5 files changed, 21 insertions(+), 37 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/49790273/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
index e036441..6b5527b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
@@ -974,13 +974,12 @@ public interface RegionObserver {
* @param ctx the environment provided by the region server
* @param stagingFamilyPaths pairs of { CF, HFile path } submitted for bulk 
load
* @param finalPaths Map of CF to List of file paths for the loaded files
-   * @param hasLoaded whether the bulkLoad was successful
-   * @return the new value of hasLoaded
+   *   if the Map is not null, the bulkLoad was successful. Otherwise the bulk 
load failed.
+   *   bulkload is done by the time this hook is called.
*/
-  default boolean 
postBulkLoadHFile(ObserverContext ctx,
-  List> stagingFamilyPaths, Map 
finalPaths,
-  boolean hasLoaded) throws IOException {
-return hasLoaded;
+  default void postBulkLoadHFile(ObserverContext 
ctx,
+  List> stagingFamilyPaths, Map 
finalPaths)
+  throws IOException {
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/49790273/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 58e2970..f5a35a4 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -2214,7 +2214,6 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
   checkOpen();
   requestCount.increment();
   HRegion region = getRegion(request.getRegion());
-  boolean loaded = false;
   Map map = null;
 
   // Check to see if this bulk load would exceed the space quota for this 
table
@@ -2233,24 +2232,20 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
 }
   }
 
+  List> familyPaths = new 
ArrayList<>(request.getFamilyPathCount());
+  for (FamilyPath familyPath : request.getFamilyPathList()) {
+familyPaths.add(new Pair<>(familyPath.getFamily().toByteArray(), 
familyPath.getPath()));
+  }
   if (!request.hasBulkToken()) {
-// Old style bulk load. This will not be supported in future releases
-List> familyPaths = new 
ArrayList<>(request.getFamilyPathCount());
-for (FamilyPath familyPath : request.getFamilyPathList()) {
-  familyPaths.add(new Pair<>(familyPath.getFamily().toByteArray(), 
familyPath.getPath()));
-}
 if (region.getCoprocessorHost() != null) {
   region.getCoprocessorHost().preBulkLoadHFile(familyPaths);
 }
 try {
   map = region.bulkLoadHFiles(familyPaths, request.getAssignSeqNum(), 
null,
   request.getCopyFile());
-  if (map != null) {
-loaded = true;
-  }
 } finally {
   if (region.getCoprocessorHost() != null) {
-loaded = 
region.getCoprocessorHost().postBulkLoadHFile(familyPaths, map, loaded);
+

hbase git commit: HBASE-19414 enable TestMasterOperationsForRegionReplicas#testIncompleteMetaTableReplicaInformation in branch-1.3 and branch-1.2

2017-12-06 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 e24c4e226 -> 0875e8611


HBASE-19414 enable 
TestMasterOperationsForRegionReplicas#testIncompleteMetaTableReplicaInformation 
in branch-1.3 and branch-1.2

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0875e861
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0875e861
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0875e861

Branch: refs/heads/branch-1.3
Commit: 0875e8611dfd2c15e5f41335a0164af80db039a2
Parents: e24c4e22
Author: Yung-An He 
Authored: Wed Dec 6 21:23:14 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Dec 6 21:26:41 2017 +0800

--
 .../hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0875e861/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index 2112be7..a3a171e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -232,7 +232,7 @@ public class TestMasterOperationsForRegionReplicas {
 }
   }
 
-  //@Test (TODO: enable when we have support for alter_table- HBASE-10361).
+  @Test
   public void testIncompleteMetaTableReplicaInformation() throws Exception {
 final TableName table = TableName.valueOf("fooTableTest1");
 final int numRegions = 3;



hbase git commit: HBASE-19414 enable TestMasterOperationsForRegionReplicas#testIncompleteMetaTableReplicaInformation in branch-1.3 and branch-1.2

2017-12-06 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 358e2d7df -> 199c0e6a3


HBASE-19414 enable 
TestMasterOperationsForRegionReplicas#testIncompleteMetaTableReplicaInformation 
in branch-1.3 and branch-1.2

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/199c0e6a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/199c0e6a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/199c0e6a

Branch: refs/heads/branch-1.2
Commit: 199c0e6a3cafd6d3a0c3566b31d9c2e26df7c16f
Parents: 358e2d7
Author: Yung-An He 
Authored: Wed Dec 6 21:23:14 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Dec 6 21:24:15 2017 +0800

--
 .../hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/199c0e6a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
index 2112be7..a3a171e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterOperationsForRegionReplicas.java
@@ -232,7 +232,7 @@ public class TestMasterOperationsForRegionReplicas {
 }
   }
 
-  //@Test (TODO: enable when we have support for alter_table- HBASE-10361).
+  @Test
   public void testIncompleteMetaTableReplicaInformation() throws Exception {
 final TableName table = TableName.valueOf("fooTableTest1");
 final int numRegions = 3;



hbase git commit: HBASE-19295 The Configuration returned by CPEnv should be read-only.

2017-12-06 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 497902731 -> d29ffd4e2


HBASE-19295 The Configuration returned by CPEnv should be read-only.

Adds a ReadOnlyConfiguration that delegates gets but throws exception
on sets/adds, etc.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d29ffd4e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d29ffd4e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d29ffd4e

Branch: refs/heads/branch-2
Commit: d29ffd4e291a2f9722c65d518551f80ee60810dc
Parents: 4979027
Author: Michael Stack 
Authored: Mon Dec 4 20:58:02 2017 -0800
Committer: Michael Stack 
Committed: Wed Dec 6 15:18:53 2017 -0800

--
 .../hadoop/hbase/CoprocessorEnvironment.java|   5 +-
 .../hbase/coprocessor/BaseEnvironment.java  |   2 +-
 .../coprocessor/ReadOnlyConfiguration.java  | 439 +++
 .../TestCoprocessorConfiguration.java   |  29 +-
 4 files changed, 472 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d29ffd4e/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
index 418d624..4fab733 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
@@ -45,7 +45,10 @@ public interface CoprocessorEnvironment {
   /** @return the load sequence number */
   int getLoadSequence();
 
-  /** @return the configuration */
+  /**
+   * @return a Read-only Configuration; throws {@link 
UnsupportedOperationException} if you try
+   *   to set a configuration.
+   */
   Configuration getConfiguration();
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/d29ffd4e/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
index ebbca65..9f5ca23 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
@@ -57,7 +57,7 @@ public class BaseEnvironment 
implements CoprocessorEnviro
 this.priority = priority;
 this.state = Coprocessor.State.INSTALLED;
 this.seq = seq;
-this.conf = conf;
+this.conf = new ReadOnlyConfiguration(conf);
   }
 
   /** Initialize the environment */

http://git-wip-us.apache.org/repos/asf/hbase/blob/d29ffd4e/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.java
new file mode 100644
index 000..7f2ddc8
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.java
@@ -0,0 +1,439 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.coprocessor;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Reader;
+import java.io.Writer;
+import java.net.InetSocketAddress;
+import java.net.URL;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Pattern;
+

svn commit: r23418 [2/3] - /dev/hbase/hbase-1.4.0RC0/

2017-12-06 Thread apurtell

Added: dev/hbase/hbase-1.4.0RC0/hbase-1.3.1-1.4.0RC0_compatibility_report.html
==
--- dev/hbase/hbase-1.4.0RC0/hbase-1.3.1-1.4.0RC0_compatibility_report.html 
(added)
+++ dev/hbase/hbase-1.4.0RC0/hbase-1.3.1-1.4.0RC0_compatibility_report.html Thu 
Dec  7 00:27:13 2017
@@ -0,0 +1,8604 @@
+
+
+http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
+http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
+
+
+
+
+hbase: rel/1.3.1 to 1.4.0RC0 compatibility report
+
+body {
+font-family:Arial, sans-serif;
+background-color:White;
+color:Black;
+}
+hr {
+color:Black;
+background-color:Black;
+height:1px;
+border:0;
+}
+h1 {
+margin-bottom:0px;
+padding-bottom:0px;
+font-size:1.625em;
+}
+h2 {
+margin-bottom:0px;
+padding-bottom:0px;
+font-size:1.25em;
+white-space:nowrap;
+}
+div.symbols {
+color:#003E69;
+}
+div.symbols i {
+color:Brown;
+}
+span.section {
+font-weight:bold;
+cursor:pointer;
+color:#003E69;
+white-space:nowrap;
+margin-left:0.3125em;
+}
+span:hover.section {
+color:#336699;
+}
+span.sect_aff {
+cursor:pointer;
+padding-left:1.55em;
+font-size:0.875em;
+color:#cc3300;
+}
+span.ext {
+font-weight:100;
+}
+span.jar {
+color:#cc3300;
+font-size:0.875em;
+font-weight:bold;
+}
+div.jar_list {
+padding-left:0.4em;
+font-size:0.94em;
+}
+span.pkg_t {
+color:#408080;
+font-size:0.875em;
+}
+span.pkg {
+color:#408080;
+font-size:0.875em;
+font-weight:bold;
+}
+span.cname {
+color:Green;
+font-size:0.875em;
+font-weight:bold;
+}
+span.iname_b {
+font-weight:bold;
+}
+span.iname_a {
+color:#33;
+font-weight:bold;
+font-size:0.94em;
+}
+span.sym_p {
+font-weight:normal;
+white-space:normal;
+}
+span.sym_pd {
+white-space:normal;
+}
+span.sym_p span, span.sym_pd span {
+white-space:nowrap;
+}
+span.attr {
+color:Black;
+font-weight:100;
+}
+span.deprecated {
+color:Red;
+font-weight:bold;
+font-family:Monaco, monospace;
+}
+div.affect {
+padding-left:1em;
+padding-bottom:10px;
+font-size:0.87em;
+font-style:italic;
+line-height:0.9em;
+}
+div.affected {
+padding-left:2em;
+padding-top:10px;
+}
+table.ptable {
+border-collapse:collapse;
+border:1px outset black;
+margin-left:0.95em;
+margin-top:3px;
+margin-bottom:3px;
+width:56.25em;
+}
+table.ptable td {
+border:1px solid Gray;
+padding:3px;
+font-size:0.875em;
+text-align:left;
+vertical-align:top;
+max-width:28em;
+word-wrap:break-word;
+}
+table.ptable th {
+background-color:#ee;
+font-weight:bold;
+color:#33;
+font-family:Verdana, Arial;
+font-size:0.875em;
+border:1px solid Gray;
+text-align:center;
+vertical-align:top;
+white-space:nowrap;
+padding:3px;
+}
+table.summary {
+border-collapse:collapse;
+border:1px outset black;
+}
+table.summary th {
+background-color:#ee;
+font-weight:100;
+text-align:left;
+font-size:0.94em;
+white-space:nowrap;
+border:1px inset Gray;
+padding:3px;
+}
+table.summary td {
+text-align:right;
+white-space:nowrap;
+border:1px inset Gray;
+padding:3px 5px 3px 10px;
+}
+span.mngl {
+padding-left:1em;
+font-size:0.875em;
+cursor:text;
+color:#44;
+font-weight:bold;
+}
+span.pleft {
+padding-left:2.5em;
+}
+span.color_p {
+font-style:italic;
+color:Brown;
+}
+span.param {
+font-style:italic;
+}
+span.focus_p {
+font-style:italic;
+background-color:#DCDCDC;
+}
+span.ttype {
+font-weight:100;
+}
+span.nowrap {
+white-space:nowrap;
+}
+span.value {
+white-space:nowrap;
+font-weight:bold;
+}
+.passed {
+background-color:#CCFFCC;
+font-weight:100;
+}
+.warning {
+background-color:#F4F4AF;
+font-weight:100;
+}
+.failed {
+background-color:#FF;
+font-weight:100;
+}
+.new {
+background-color:#C6DEFF;
+font-weight:100;
+}
+
+.compatible {
+background-color:#CCFFCC;
+font-weight:100;
+}
+.almost_compatible {
+background-color:#FFDAA3;
+font-weight:100;
+}
+.incompatible {
+background-color:#FF;
+font-weight:100;
+}
+.gray {
+background-color:#DCDCDC;
+font-weight:100;
+}
+
+.top_ref {
+font-size:0.69em;
+}
+.footer {
+font-size:0.8125em;
+}
+.tabset {
+float:left;
+}
+a.tab {
+border:1px solid Black;
+float:left;
+margin:0px 5px -1px 0px;
+padding:3px 5px 3px 5px;
+position:relative;
+font-size:0.875em;
+background-color:#DDD;
+text-decoration:none;
+color:Black;
+}
+a.disabled:hover
+{
+color:Black;
+background:#EEE;
+}
+a.active:hover
+{
+color:Black;
+background:White;
+}
+a.active {
+border-bottom-color:White;
+background-color:White;
+}
+div.tab {
+

svn commit: r23418 [3/3] - /dev/hbase/hbase-1.4.0RC0/

2017-12-06 Thread apurtell
Added: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz
==
Binary file - no diff available.

Propchange: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz
--
svn:mime-type = application/octet-stream

Added: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.asc
==
--- dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.asc (added)
+++ dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.asc Thu Dec  7 00:27:13 2017
@@ -0,0 +1,16 @@
+-BEGIN PGP SIGNATURE-
+
+iQIzBAABCAAdFiEEUPHou3xnqxS9/AohhZd1TdU2XM0FAloogf8ACgkQhZd1TdU2
+XM3PcA/+K8tMPn2rgvfA0Vk2aOFJFvEncXUGbsY6qvQtb8I2doGarfDbn1YUPKTS
+KeZ4+QIptOOzJ4mbOAGf4JT2yVUc/102oPtFKuHR/zkowv3PQv5R3f1jF/a46WY7
+4y2D6Y4BfQz9hH0WKK0kL0kimRLaycBsEMoyy5YCUN+CQS0r8UHLkkiaOiM+zu22
+AjwM629Hlv+Xp0sacx+FXLz3cBTVtylabZ0W2PQEJofYgjVk+JbuhyhOD+yToYcu
+OjtrSI2QQ8E6w9YMB6jIjvJSSO0jSFhsfAwMFAmZjfjJwZ00M+3q1lPc2C0TPHEL
+xahYKFrElg3ary2jnqiLYUn1X5a63Q6qQ9snsFtpRr5Gh+A3q4xtkG5piZMJnaOF
+1kyJ+QYrX2A4pl4BUOa76uiGZywk0pPCrDYzHFnwCqMB4ka/yKSI1kizezlED65i
+1mREfcLP4FvDbsJAeHDQo4xIia827dqmns/X3vP1PWQGfCP1sB9bN+zaZXGFQFHa
+CpVXvSoDrBrUn+JWpO0KsbWYKH1a+w8olNXTCEu/0O05B7cdOcnuPwrCHsZ0pRB1
+O/KC4hgJtrVRCFksMWS6NDPvLVJIOkUu4smq6EeLvDmqOR/8Jc7qWjfexbH2rz9T
+c+K6m782nKvFeQ4ks5HY9iwPncz/2pcKJMnhZRkAx8YcvGHF6B8=
+=bqg8
+-END PGP SIGNATURE-

Added: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.md5
==
--- dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.md5 (added)
+++ dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.md5 Thu Dec  7 00:27:13 2017
@@ -0,0 +1 @@
+hbase-1.4.0-bin.tar.gz: AF 33 98 CE 9F D1 E2 BA  D8 54 0E 7B 0E 72 B1 A1

Added: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.sha
==
--- dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.sha (added)
+++ dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.sha Thu Dec  7 00:27:13 2017
@@ -0,0 +1,3 @@
+hbase-1.4.0-bin.tar.gz: 3C3699EE 27FEF0CF 64D88201 44BD0988 F700E76B 07BF7C69
+235E8697 8EEE6305 520EB712 4B0AA119 D39AA1AB D5D503F6
+B8A66354 39935E4A FC31FAEC 6EB9F885

Added: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz
==
Binary file - no diff available.

Propchange: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz
--
svn:mime-type = application/octet-stream

Added: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.asc
==
--- dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.asc (added)
+++ dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.asc Thu Dec  7 00:27:13 2017
@@ -0,0 +1,16 @@
+-BEGIN PGP SIGNATURE-
+
+iQIzBAABCAAdFiEEUPHou3xnqxS9/AohhZd1TdU2XM0FAloogf8ACgkQhZd1TdU2
+XM2AYA//S027Y5423WwlfeSXT9zrs5D5gVVUhyJZnHaGa9B86bi8MA+0wrA8pG7A
+Hj2z5I+fsIAtfgnLm4/FnSvG+3p+elYJs7mR3x4fRyFDoo6cMc6iNM/OTbFkysy5
+KJLzz748ydytojaqF9rP+jSn6AS4zDIuCpGpruXZR2aP5b2XhAMY5YtsdD6zZfG0
+1HbA21gHrTu/GbZRH/OXTlGgMs6YtCUPCHue/tJihrcbmBtX4VsB6b54Sp67A128
+kzAf1cGdoNvWGZT4d0NgCNo4v8Eo6tvJLnBCejAg3rPlTh6eKu3FZRdTwPAO8RKg
+UAwePRejteBrtULWdtHfpghAYWfWRXN4K9UR5bFkJPTGGWhMvhiGOgeyioLN1NDa
+zdMRIWigP1SiXxgwwLcYbhhgB4xfau5bYOr22h7fkYCVoWBun0cD13ppUCe/3/HN
+8B/Y209jzMHTx7GGUDpAYBpDsIkRPulsv0pZwIfmiG1PN4FfhzG8C03HBtsm3xeb
+gEsz5/l8ef5PLXc0uX1lciOMmZJ4PSvazlNYF8+nird7A5fev92halbyMq0L7tJx
+5Gl71nbRyOny79ZE0S3hMKWCErwBzlxguGG03d22vgaAYdo+13bLFEdmcGD3m6Cp
+n9wJzG3O9wKqS7cEo2qYkrqX6K2l+9Ce1SO1tS0aQxKtsThN4jI=
+=ES9G
+-END PGP SIGNATURE-

Added: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.md5
==
--- dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.md5 (added)
+++ dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.md5 Thu Dec  7 00:27:13 2017
@@ -0,0 +1 @@
+hbase-1.4.0-src.tar.gz: 73 D7 92 04 39 EE AD E9  16 69 4C BB 83 CA E3 67

Added: dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.sha
==
--- dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.sha (added)
+++ dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.sha Thu Dec  7 00:27:13 2017
@@ -0,0 +1,3 @@
+hbase-1.4.0-src.tar.gz: AAC38EF3 2F64538C 4831A839 5AE92F4C 16289057 19C9BBFA
+C958846B 53169F66 D391D7BC D974703D 2BCA2021 961C5D48
+55F43EB9 8DCDB409 13C302CB 0DDDE416




hbase git commit: HBASE-19295 The Configuration returned by CPEnv should be read-only.

2017-12-06 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 3e7b90ac6 -> 4a2e8b852


HBASE-19295 The Configuration returned by CPEnv should be read-only.

Adds a ReadOnlyConfiguration that delegates gets but throws exception
on sets/adds, etc.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4a2e8b85
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4a2e8b85
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4a2e8b85

Branch: refs/heads/master
Commit: 4a2e8b852dceb8f68adf33c940621734cc6e2f12
Parents: 3e7b90a
Author: Michael Stack 
Authored: Mon Dec 4 20:58:02 2017 -0800
Committer: Michael Stack 
Committed: Wed Dec 6 15:18:09 2017 -0800

--
 .../hadoop/hbase/CoprocessorEnvironment.java|   5 +-
 .../hbase/coprocessor/BaseEnvironment.java  |   2 +-
 .../coprocessor/ReadOnlyConfiguration.java  | 439 +++
 .../TestCoprocessorConfiguration.java   |  29 +-
 4 files changed, 472 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4a2e8b85/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
index 418d624..4fab733 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/CoprocessorEnvironment.java
@@ -45,7 +45,10 @@ public interface CoprocessorEnvironment {
   /** @return the load sequence number */
   int getLoadSequence();
 
-  /** @return the configuration */
+  /**
+   * @return a Read-only Configuration; throws {@link 
UnsupportedOperationException} if you try
+   *   to set a configuration.
+   */
   Configuration getConfiguration();
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/4a2e8b85/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
index ebbca65..9f5ca23 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEnvironment.java
@@ -57,7 +57,7 @@ public class BaseEnvironment 
implements CoprocessorEnviro
 this.priority = priority;
 this.state = Coprocessor.State.INSTALLED;
 this.seq = seq;
-this.conf = conf;
+this.conf = new ReadOnlyConfiguration(conf);
   }
 
   /** Initialize the environment */

http://git-wip-us.apache.org/repos/asf/hbase/blob/4a2e8b85/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.java
new file mode 100644
index 000..7f2ddc8
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/ReadOnlyConfiguration.java
@@ -0,0 +1,439 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.coprocessor;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Reader;
+import java.io.Writer;
+import java.net.InetSocketAddress;
+import java.net.URL;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Pattern;
+

svn commit: r23418 [1/3] - /dev/hbase/hbase-1.4.0RC0/

2017-12-06 Thread apurtell
Author: apurtell
Date: Thu Dec  7 00:27:13 2017
New Revision: 23418

Log:
Stage HBase 1.4.0RC0 artifacts

Added:
dev/hbase/hbase-1.4.0RC0/
dev/hbase/hbase-1.4.0RC0/hbase-1.3.1-1.4.0RC0_compatibility_report.html
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz   (with props)
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.asc
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.md5
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-bin.tar.gz.sha
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz   (with props)
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.asc
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.md5
dev/hbase/hbase-1.4.0RC0/hbase-1.4.0-src.tar.gz.sha



[3/4] hbase git commit: HBASE-19447 INFO level logging of GetClusterStatus from HMaster is too chatty

2017-12-06 Thread apurtell
HBASE-19447 INFO level logging of GetClusterStatus from HMaster is too chatty


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/97d17ae1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/97d17ae1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/97d17ae1

Branch: refs/heads/master
Commit: 97d17ae195cd522d0ce74db0733f63337a36b480
Parents: 4a2e8b8
Author: Andrew Purtell 
Authored: Wed Dec 6 17:05:07 2017 -0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 17:06:34 2017 -0800

--
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java  | 2 --
 1 file changed, 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/97d17ae1/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 16e8587..1c57620 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -2485,8 +2485,6 @@ public class HMaster extends HRegionServer implements 
MasterServices {
   cpHost.preGetClusterStatus();
 }
 ClusterStatus status = getClusterStatusWithoutCoprocessor(options);
-LOG.info(getClientIdAuditPrefix() + " get ClusterStatus, status=" + status
-+ ", options=" + options);
 if (cpHost != null) {
   cpHost.postGetClusterStatus(status);
 }



[2/4] hbase git commit: HBASE-19447 INFO level logging of GetClusterStatus from HMaster is too chatty

2017-12-06 Thread apurtell
HBASE-19447 INFO level logging of GetClusterStatus from HMaster is too chatty


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/70c76882
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/70c76882
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/70c76882

Branch: refs/heads/branch-1
Commit: 70c7688271619cc416468208b455f8f2b070d326
Parents: f88304d
Author: Andrew Purtell 
Authored: Wed Dec 6 17:05:07 2017 -0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 17:05:19 2017 -0800

--
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java   | 1 -
 1 file changed, 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/70c76882/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index a97f9f4..d7fc2e5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -2415,7 +2415,6 @@ public class HMaster extends HRegionServer implements 
MasterServices, Server {
   cpHost.preGetClusterStatus();
 }
 ClusterStatus status = getClusterStatusWithoutCoprocessor();
-LOG.info(getClientIdAuditPrefix() + " get ClusterStatus, status=" + 
status);
 if (cpHost != null) {
   cpHost.postGetClusterStatus(status);
 }



[1/4] hbase git commit: HBASE-19447 INFO level logging of GetClusterStatus from HMaster is too chatty

2017-12-06 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 f88304d16 -> 70c768827
  refs/heads/branch-1.4 3bc2a6731 -> 099de9088
  refs/heads/branch-2 d29ffd4e2 -> e7a2e4352
  refs/heads/master 4a2e8b852 -> 97d17ae19


HBASE-19447 INFO level logging of GetClusterStatus from HMaster is too chatty


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/099de908
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/099de908
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/099de908

Branch: refs/heads/branch-1.4
Commit: 099de9088b5bcee3d2ce1c96f940e2f198c775c0
Parents: 3bc2a67
Author: Andrew Purtell 
Authored: Wed Dec 6 17:05:07 2017 -0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 17:05:15 2017 -0800

--
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java   | 1 -
 1 file changed, 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/099de908/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index fc8ab28..0f2b8e6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -2406,7 +2406,6 @@ public class HMaster extends HRegionServer implements 
MasterServices, Server {
   cpHost.preGetClusterStatus();
 }
 ClusterStatus status = getClusterStatusWithoutCoprocessor();
-LOG.info(getClientIdAuditPrefix() + " get ClusterStatus, status=" + 
status);
 if (cpHost != null) {
   cpHost.postGetClusterStatus(status);
 }



[4/4] hbase git commit: HBASE-19447 INFO level logging of GetClusterStatus from HMaster is too chatty

2017-12-06 Thread apurtell
HBASE-19447 INFO level logging of GetClusterStatus from HMaster is too chatty


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e7a2e435
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e7a2e435
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e7a2e435

Branch: refs/heads/branch-2
Commit: e7a2e4352c7c52c21651b3db473936eac8ce33e3
Parents: d29ffd4
Author: Andrew Purtell 
Authored: Wed Dec 6 17:05:07 2017 -0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 17:06:34 2017 -0800

--
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java  | 2 --
 1 file changed, 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e7a2e435/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 16e8587..1c57620 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -2485,8 +2485,6 @@ public class HMaster extends HRegionServer implements 
MasterServices {
   cpHost.preGetClusterStatus();
 }
 ClusterStatus status = getClusterStatusWithoutCoprocessor(options);
-LOG.info(getClientIdAuditPrefix() + " get ClusterStatus, status=" + status
-+ ", options=" + options);
 if (cpHost != null) {
   cpHost.postGetClusterStatus(status);
 }



[1/2] hbase git commit: HBASE-19446 Misspelled 'default' in SimpleRpcScheduler

2017-12-06 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 70c768827 -> 5398d83bc
  refs/heads/branch-1.4 099de9088 -> 8731d9e3e


HBASE-19446 Misspelled 'default' in SimpleRpcScheduler


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8731d9e3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8731d9e3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8731d9e3

Branch: refs/heads/branch-1.4
Commit: 8731d9e3e8f16cb3487336d0e96a02ed4ee5834f
Parents: 099de90
Author: Andrew Purtell 
Authored: Wed Dec 6 17:11:10 2017 -0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 17:11:10 2017 -0800

--
 .../java/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java   | 6 +++---
 .../org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java| 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8731d9e3/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java
index 698381c..8fc0023 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java
@@ -82,14 +82,14 @@ public class SimpleRpcScheduler extends RpcScheduler 
implements ConfigurationObs
 
 if (callqReadShare > 0) {
   // at least 1 read handler and 1 write handler
-  callExecutor = new RWQueueRpcExecutor("deafult.RWQ", Math.max(2, 
handlerCount),
+  callExecutor = new RWQueueRpcExecutor("default.RWQ", Math.max(2, 
handlerCount),
 maxQueueLength, priority, conf, server);
 } else {
   if (RpcExecutor.isFifoQueueType(callQueueType) || 
RpcExecutor.isCodelQueueType(callQueueType)) {
-callExecutor = new FastPathBalancedQueueRpcExecutor("deafult.FPBQ", 
handlerCount,
+callExecutor = new FastPathBalancedQueueRpcExecutor("default.FPBQ", 
handlerCount,
 maxQueueLength, priority, conf, server);
   } else {
-callExecutor = new BalancedQueueRpcExecutor("deafult.BQ", 
handlerCount, maxQueueLength,
+callExecutor = new BalancedQueueRpcExecutor("default.BQ", 
handlerCount, maxQueueLength,
 priority, conf, server);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/8731d9e3/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
index 2fd203c..8ae1a2f 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
@@ -410,7 +410,7 @@ public class TestSimpleRpcScheduler {
   @Test
   public void testCoDelScheduling() throws Exception {
 CoDelEnvironmentEdge envEdge = new CoDelEnvironmentEdge();
-envEdge.threadNamePrefixs.add("RpcServer.deafult.FPBQ.Codel.handler");
+envEdge.threadNamePrefixs.add("RpcServer.default.FPBQ.Codel.handler");
 Configuration schedConf = HBaseConfiguration.create();
 schedConf.setInt(RpcScheduler.IPC_SERVER_MAX_CALLQUEUE_LENGTH, 250);
 



[2/2] hbase git commit: HBASE-19446 Misspelled 'default' in SimpleRpcScheduler

2017-12-06 Thread apurtell
HBASE-19446 Misspelled 'default' in SimpleRpcScheduler


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5398d83b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5398d83b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5398d83b

Branch: refs/heads/branch-1
Commit: 5398d83bc71e41752a116f02e935b9f8f8fc198d
Parents: 70c7688
Author: Andrew Purtell 
Authored: Wed Dec 6 17:11:10 2017 -0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 17:11:15 2017 -0800

--
 .../java/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java   | 6 +++---
 .../org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java| 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5398d83b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java
index 698381c..8fc0023 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java
@@ -82,14 +82,14 @@ public class SimpleRpcScheduler extends RpcScheduler 
implements ConfigurationObs
 
 if (callqReadShare > 0) {
   // at least 1 read handler and 1 write handler
-  callExecutor = new RWQueueRpcExecutor("deafult.RWQ", Math.max(2, 
handlerCount),
+  callExecutor = new RWQueueRpcExecutor("default.RWQ", Math.max(2, 
handlerCount),
 maxQueueLength, priority, conf, server);
 } else {
   if (RpcExecutor.isFifoQueueType(callQueueType) || 
RpcExecutor.isCodelQueueType(callQueueType)) {
-callExecutor = new FastPathBalancedQueueRpcExecutor("deafult.FPBQ", 
handlerCount,
+callExecutor = new FastPathBalancedQueueRpcExecutor("default.FPBQ", 
handlerCount,
 maxQueueLength, priority, conf, server);
   } else {
-callExecutor = new BalancedQueueRpcExecutor("deafult.BQ", 
handlerCount, maxQueueLength,
+callExecutor = new BalancedQueueRpcExecutor("default.BQ", 
handlerCount, maxQueueLength,
 priority, conf, server);
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/5398d83b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
index 2fd203c..8ae1a2f 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestSimpleRpcScheduler.java
@@ -410,7 +410,7 @@ public class TestSimpleRpcScheduler {
   @Test
   public void testCoDelScheduling() throws Exception {
 CoDelEnvironmentEdge envEdge = new CoDelEnvironmentEdge();
-envEdge.threadNamePrefixs.add("RpcServer.deafult.FPBQ.Codel.handler");
+envEdge.threadNamePrefixs.add("RpcServer.default.FPBQ.Codel.handler");
 Configuration schedConf = HBaseConfiguration.create();
 schedConf.setInt(RpcScheduler.IPC_SERVER_MAX_CALLQUEUE_LENGTH, 250);
 



hbase git commit: Update CHANGES.txt for 1.4.0RC0

2017-12-06 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 8731d9e3e -> 3d571827c


Update CHANGES.txt for 1.4.0RC0


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3d571827
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3d571827
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3d571827

Branch: refs/heads/branch-1.4
Commit: 3d571827cbd72602c28b58ceeb1a982993d015d5
Parents: 8731d9e
Author: Andrew Purtell 
Authored: Wed Dec 6 17:13:17 2017 -0800
Committer: Andrew Purtell 
Committed: Wed Dec 6 17:13:17 2017 -0800

--
 CHANGES.txt | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3d571827/CHANGES.txt
--
diff --git a/CHANGES.txt b/CHANGES.txt
index a833608..817b383 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -451,6 +451,8 @@ Release Notes - HBase - Version 1.4.0 12/18/2017
 * [HBASE-19440] - Not able to enable balancer with RSGroups once disabled
 * [HBASE-19442] - Backport HBASE-19065 to branch-1 
(HRegion#bulkLoadHFiles() should wait for concurrent Region#flush() to finish).
 * [HBASE-19445] - PerformanceEvaluation NPE processing split policy option
+* [HBASE-19446] - Misspelled 'default' in SimpleRpcScheduler
+* [HBASE-19447] - INFO level logging of GetClusterStatus from HMaster is 
too chatty
 
 ** Improvement
 * [HBASE-11013] - Clone Snapshots on Secure Cluster Should provide option 
to apply Retained User Permissions



[hbase] Git Push Summary

2017-12-06 Thread apurtell
Repository: hbase
Updated Tags:  refs/tags/1.4.0RC0 8c720fbd6 -> efad3da11


hbase git commit: HBASE-19417 Remove boolean return value from postBulkLoadHFile hook

2017-12-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master ebd8841e0 -> 27ed4d8ad


HBASE-19417 Remove boolean return value from postBulkLoadHFile hook


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/27ed4d8a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/27ed4d8a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/27ed4d8a

Branch: refs/heads/master
Commit: 27ed4d8add20c5424943eee54acff266567e765a
Parents: ebd8841
Author: tedyu 
Authored: Wed Dec 6 07:06:28 2017 -0800
Committer: tedyu 
Committed: Wed Dec 6 07:06:28 2017 -0800

--
 .../hadoop/hbase/backup/BackupObserver.java | 18 --
 .../hbase/coprocessor/RegionObserver.java   | 11 +--
 .../hbase/regionserver/RSRpcServices.java   | 20 ++--
 .../regionserver/RegionCoprocessorHost.java | 16 +++-
 .../regionserver/SecureBulkLoadManager.java |  6 +-
 .../hbase/coprocessor/SimpleRegionObserver.java |  5 ++---
 6 files changed, 29 insertions(+), 47 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/27ed4d8a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupObserver.java
--
diff --git 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupObserver.java 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupObserver.java
index e2b27ff..1d8f780 100644
--- 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupObserver.java
+++ 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupObserver.java
@@ -54,17 +54,17 @@ public class BackupObserver implements RegionCoprocessor, 
RegionObserver {
   }
 
   @Override
-  public boolean 
postBulkLoadHFile(ObserverContext ctx,
-List> stagingFamilyPaths, Map 
finalPaths,
-boolean hasLoaded) throws IOException {
+  public void postBulkLoadHFile(ObserverContext 
ctx,
+List> stagingFamilyPaths, Map 
finalPaths)
+throws IOException {
 Configuration cfg = ctx.getEnvironment().getConfiguration();
-if (!hasLoaded) {
+if (finalPaths == null) {
   // there is no need to record state
-  return hasLoaded;
+  return;
 }
-if (finalPaths == null || !BackupManager.isBackupEnabled(cfg)) {
+if (!BackupManager.isBackupEnabled(cfg)) {
   LOG.debug("skipping recording bulk load in postBulkLoadHFile since 
backup is disabled");
-  return hasLoaded;
+  return;
 }
 try (Connection connection = ConnectionFactory.createConnection(cfg);
 BackupSystemTable tbl = new BackupSystemTable(connection)) {
@@ -75,13 +75,11 @@ public class BackupObserver implements RegionCoprocessor, 
RegionObserver {
 if (LOG.isTraceEnabled()) {
   LOG.trace(tableName + " has not gone thru full backup");
 }
-return hasLoaded;
+return;
   }
   tbl.writePathsPostBulkLoad(tableName, info.getEncodedNameAsBytes(), 
finalPaths);
-  return hasLoaded;
 } catch (IOException ioe) {
   LOG.error("Failed to get tables which have been fully backed up", ioe);
-  return false;
 }
   }
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/27ed4d8a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
index e036441..6b5527b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java
@@ -974,13 +974,12 @@ public interface RegionObserver {
* @param ctx the environment provided by the region server
* @param stagingFamilyPaths pairs of { CF, HFile path } submitted for bulk 
load
* @param finalPaths Map of CF to List of file paths for the loaded files
-   * @param hasLoaded whether the bulkLoad was successful
-   * @return the new value of hasLoaded
+   *   if the Map is not null, the bulkLoad was successful. Otherwise the bulk 
load failed.
+   *   bulkload is done by the time this hook is called.
*/
-  default boolean 
postBulkLoadHFile(ObserverContext ctx,
-  List> stagingFamilyPaths, Map 
finalPaths,
-  boolean hasLoaded) throws IOException {
-return hasLoaded;
+  default void postBulkLoadHFile(ObserverContext 
ctx,
+  List> stagingFamilyPaths, Map 
finalPaths)
+  throws IOException 

hbase git commit: HBASE-18112 (addendum) fix the out-of-bounds index

2017-12-06 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 75cdbb570 -> ebd8841e0


HBASE-18112 (addendum) fix the out-of-bounds index


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ebd8841e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ebd8841e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ebd8841e

Branch: refs/heads/master
Commit: ebd8841e0ee9ca1ab7b6dab55178761360b8d85a
Parents: 75cdbb5
Author: Chia-Ping Tsai 
Authored: Wed Dec 6 21:54:45 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Dec 6 21:54:45 2017 +0800

--
 .../org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java| 8 ++--
 1 file changed, 2 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ebd8841e/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java
index d600712..1b395a4 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java
@@ -95,18 +95,14 @@ public class NettyRpcFrameDecoder extends 
ByteToMessageDecoder {
 }
 
 int frameLengthInt = (int) frameLength;
-if (in.readableBytes() < frameLengthInt) {
+if (in.readableBytes() < frameLengthInt + FRAME_LENGTH_FIELD_LENGTH) {
   return;
 }
 
 in.skipBytes(FRAME_LENGTH_FIELD_LENGTH);
 
 // extract frame
-int readerIndex = in.readerIndex();
-ByteBuf frame = in.retainedSlice(readerIndex, frameLengthInt);
-in.readerIndex(readerIndex + frameLengthInt);
-
-out.add(frame);
+out.add(in.readRetainedSlice(frameLengthInt));
   }
 
   private void handleTooBigRequest(ByteBuf in) throws IOException {



hbase git commit: HBASE-18112 (addendum) fix the out-of-bounds index

2017-12-06 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 03cb58158 -> b9f1f5a17


HBASE-18112 (addendum) fix the out-of-bounds index


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b9f1f5a1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b9f1f5a1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b9f1f5a1

Branch: refs/heads/branch-2
Commit: b9f1f5a17cc60d5f690fdda236208a7ed7207d64
Parents: 03cb581
Author: Chia-Ping Tsai 
Authored: Wed Dec 6 21:54:45 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Dec 6 22:01:12 2017 +0800

--
 .../org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java| 8 ++--
 1 file changed, 2 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b9f1f5a1/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java
index d600712..1b395a4 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.java
@@ -95,18 +95,14 @@ public class NettyRpcFrameDecoder extends 
ByteToMessageDecoder {
 }
 
 int frameLengthInt = (int) frameLength;
-if (in.readableBytes() < frameLengthInt) {
+if (in.readableBytes() < frameLengthInt + FRAME_LENGTH_FIELD_LENGTH) {
   return;
 }
 
 in.skipBytes(FRAME_LENGTH_FIELD_LENGTH);
 
 // extract frame
-int readerIndex = in.readerIndex();
-ByteBuf frame = in.retainedSlice(readerIndex, frameLengthInt);
-in.readerIndex(readerIndex + frameLengthInt);
-
-out.add(frame);
+out.add(in.readRetainedSlice(frameLengthInt));
   }
 
   private void handleTooBigRequest(ByteBuf in) throws IOException {



[09/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependencies.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependencies.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependencies.html
index cfdb581..836f61d 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependencies.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-client archetype  Project 
Dependencies
 
@@ -924,216 +924,223 @@
 jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
+org.apache.hbase
+http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
+3.0.0-SNAPSHOT
+tests
+test-jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
 org.apache.htrace
 http://incubator.apache.org/projects/htrace.html;>htrace-core
 3.2.0-incubating
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 org.apache.zookeeper
 zookeeper
 3.4.6
 tests
 test-jar
 -
-
+
 org.codehaus.jackson
 http://jackson.codehaus.org;>jackson-core-asl
 1.9.13
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 org.codehaus.jackson
 http://jackson.codehaus.org;>jackson-mapper-asl
 1.9.13
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-http
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-io
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-security
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-server
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-servlet
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-util
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-util-ajax
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-webapp
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-xml
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.fusesource.leveldbjni
 http://leveldbjni.fusesource.org/leveldbjni-all;>leveldbjni-all
 1.8
 -
 jar
 http://www.opensource.org/licenses/BSD-3-Clause;>The BSD 3-Clause 
License
-
+
 org.glassfish
 http://uel.java.net;>javax.el
 3.0.1-b08
 -
 jar
 https://glassfish.dev.java.net/nonav/public/CDDL+GPL.html;>CDDL + GPLv2 
with classpath exception
-
+
 org.glassfish.hk2
 https://hk2.java.net/hk2-api;>hk2-api
 2.5.0-b32
 -
 jar
 https://glassfish.java.net/nonav/public/CDDL+GPL_1_1.html;>CDDL + GPLv2 
with classpath exception
-
+
 org.glassfish.hk2
 https://hk2.java.net/hk2-locator;>hk2-locator
 2.5.0-b32
 -
 jar
 https://glassfish.java.net/nonav/public/CDDL+GPL_1_1.html;>CDDL + GPLv2 
with classpath exception
-
+
 org.glassfish.hk2
 https://hk2.java.net/hk2-utils;>hk2-utils
 2.5.0-b32
 -
 jar
 https://glassfish.java.net/nonav/public/CDDL+GPL_1_1.html;>CDDL + GPLv2 
with classpath exception
-
+
 

[06/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependencies.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependencies.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependencies.html
index 6589991..a9afb8f 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependencies.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Project Dependencies
 
@@ -930,216 +930,223 @@
 jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
+org.apache.hbase
+http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
+3.0.0-SNAPSHOT
+tests
+test-jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
 org.apache.htrace
 http://incubator.apache.org/projects/htrace.html;>htrace-core
 3.2.0-incubating
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 org.apache.zookeeper
 zookeeper
 3.4.6
 tests
 test-jar
 -
-
+
 org.codehaus.jackson
 http://jackson.codehaus.org;>jackson-core-asl
 1.9.13
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 org.codehaus.jackson
 http://jackson.codehaus.org;>jackson-mapper-asl
 1.9.13
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-http
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-io
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-security
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-server
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-servlet
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-util
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-util-ajax
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-webapp
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-xml
 9.3.19.v20170502
 -
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.fusesource.leveldbjni
 http://leveldbjni.fusesource.org/leveldbjni-all;>leveldbjni-all
 1.8
 -
 jar
 http://www.opensource.org/licenses/BSD-3-Clause;>The BSD 3-Clause 
License
-
+
 org.glassfish
 http://uel.java.net;>javax.el
 3.0.1-b08
 -
 jar
 https://glassfish.dev.java.net/nonav/public/CDDL+GPL.html;>CDDL + GPLv2 
with classpath exception
-
+
 org.glassfish.hk2
 https://hk2.java.net/hk2-api;>hk2-api
 2.5.0-b32
 -
 jar
 https://glassfish.java.net/nonav/public/CDDL+GPL_1_1.html;>CDDL + GPLv2 
with classpath exception
-
+
 org.glassfish.hk2
 https://hk2.java.net/hk2-locator;>hk2-locator
 2.5.0-b32
 -
 jar
 https://glassfish.java.net/nonav/public/CDDL+GPL_1_1.html;>CDDL + GPLv2 
with classpath exception
-
+
 org.glassfish.hk2
 https://hk2.java.net/hk2-utils;>hk2-utils
 2.5.0-b32
 -
 jar
 https://glassfish.java.net/nonav/public/CDDL+GPL_1_1.html;>CDDL + GPLv2 

[10/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
index 31dca6b..563d5b8 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetype builder  Dependency 
Information
 
@@ -148,7 +148,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
index b60f886..4b02c4e 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetype builder  Project Dependency 
Management
 
@@ -775,18 +775,24 @@
 test-jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
+org.apache.hbase
+http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
+3.0.0-SNAPSHOT
+test-jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
 org.bouncycastle
 http://www.bouncycastle.org/java.html;>bcprov-jdk16
 1.46
 jar
 http://www.bouncycastle.org/licence.html;>Bouncy Castle 
Licence
-
+
 org.hamcrest
 https://github.com/hamcrest/JavaHamcrest/hamcrest-core;>hamcrest-core
 1.3
 jar
 http://www.opensource.org/licenses/bsd-license.php;>New BSD 
License
-
+
 org.mockito
 http://mockito.org;>mockito-core
 2.1.0
@@ -804,7 +810,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
index e1ea5fc..cc6dfe3 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/index.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetype builder  About
 
@@ -119,7 +119,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
index d9fd2e6..2fdfea7 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/integration.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetype builder  CI Management
 
@@ -126,7 +126,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/issue-tracking.html

[20/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
index 62bc799..5c004ce 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
@@ -250,7 +250,7 @@
 242Cell kv = cell;
 243// null input == user explicitly 
wants to flush
 244if (row == null  kv == 
null) {
-245  rollWriters();
+245  rollWriters(null);
 246  return;
 247}
 248
@@ -284,636 +284,642 @@
 276  configureStoragePolicy(conf, 
fs, tableAndFamily, writerPath);
 277}
 278
-279// If any of the HFiles for the 
column families has reached
-280// maxsize, we need to roll all 
the writers
-281if (wl != null  
wl.written + length = maxsize) {
-282  this.rollRequested = true;
-283}
-284
-285// This can only happen once a 
row is finished though
-286if (rollRequested  
Bytes.compareTo(this.previousRow, rowKey) != 0) {
-287  rollWriters();
-288}
-289
-290// create a new WAL writer, if 
necessary
-291if (wl == null || wl.writer == 
null) {
-292  if 
(conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {
-293HRegionLocation loc = null;
-294
-295String tableName = 
Bytes.toString(tableNameBytes);
-296if (tableName != null) {
-297  try (Connection connection 
= ConnectionFactory.createConnection(conf);
-298 RegionLocator 
locator =
-299   
connection.getRegionLocator(TableName.valueOf(tableName))) {
-300loc = 
locator.getRegionLocation(rowKey);
-301  } catch (Throwable e) {
-302LOG.warn("There's 
something wrong when locating rowkey: " +
-303  Bytes.toString(rowKey) 
+ " for tablename: " + tableName, e);
-304loc = null;
-305  } }
-306
-307if (null == loc) {
-308  if (LOG.isTraceEnabled()) 
{
-309LOG.trace("failed to get 
region location, so use default writer for rowkey: " +
-310  
Bytes.toString(rowKey));
-311  }
-312  wl = 
getNewWriter(tableNameBytes, family, conf, null);
-313} else {
-314  if (LOG.isDebugEnabled()) 
{
-315LOG.debug("first rowkey: 
[" + Bytes.toString(rowKey) + "]");
-316  }
-317  InetSocketAddress 
initialIsa =
-318  new 
InetSocketAddress(loc.getHostname(), loc.getPort());
-319  if 
(initialIsa.isUnresolved()) {
-320if (LOG.isTraceEnabled()) 
{
-321  LOG.trace("failed to 
resolve bind address: " + loc.getHostname() + ":"
-322  + loc.getPort() + 
", so use default writer");
-323}
-324wl = 
getNewWriter(tableNameBytes, family, conf, null);
-325  } else {
-326if (LOG.isDebugEnabled()) 
{
-327  LOG.debug("use favored 
nodes writer: " + initialIsa.getHostString());
-328}
-329wl = 
getNewWriter(tableNameBytes, family, conf, new InetSocketAddress[] { 
initialIsa
-330});
-331  }
-332}
-333  } else {
-334wl = 
getNewWriter(tableNameBytes, family, conf, null);
-335  }
-336}
-337
-338// we now have the proper WAL 
writer. full steam ahead
-339// TODO : Currently in 
SettableTimeStamp but this will also move to ExtendedCell
-340
PrivateCellUtil.updateLatestStamp(cell, this.now);
-341wl.writer.append(kv);
-342wl.written += length;
-343
-344// Copy the row so we know when a 
row transition.
-345this.previousRow = rowKey;
-346  }
-347
-348  private void rollWriters() throws 
IOException {
-349for (WriterLength wl : 
this.writers.values()) {
-350  if (wl.writer != null) {
-351LOG.info(
-352"Writer=" + 
wl.writer.getPath() + ((wl.written == 0)? "": ", wrote=" + wl.written));
-353close(wl.writer);
-354  }
-355  wl.writer = null;
-356  wl.written = 0;
-357}
-358this.rollRequested = false;
-359  }
-360
-361  /*
-362   * Create a new StoreFile.Writer.
-363   * @param family
-364   * @return A WriterLength, 
containing a new StoreFile.Writer.
-365   * @throws IOException
-366   */
-367  

[02/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-spark/dependency-convergence.html
--
diff --git a/hbase-build-configuration/hbase-spark/dependency-convergence.html 
b/hbase-build-configuration/hbase-spark/dependency-convergence.html
index 935303d..3818567 100644
--- a/hbase-build-configuration/hbase-spark/dependency-convergence.html
+++ b/hbase-build-configuration/hbase-spark/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Spark  Reactor Dependency Convergence
 
@@ -488,22 +488,22 @@
 3.4.10
 
 
-org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.zookeeper:zookeeper:jar:3.4.10:compile|+-org.apache.hadoop:hadoop-common:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.had
 oop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAP
 SHOT:test|+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-minicluster:jar:2.7.4:test|+-org.apache.hadoop:hadoop-common:test-jar:tests:2.7.4:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.7.4:test|\-org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.7.4:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for dupli
 cate)+-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)\-org.apache.hbase:hbase-rsgroup:jar:3.0.0-SNAPSHOT:compile\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile 
- version managed from 3.4.6
 ; omitted for duplicate)
-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for dup
 licate)|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for duplicat
 

[01/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 89207f3b0 -> d171b8965


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-spark/dependency-info.html
--
diff --git a/hbase-build-configuration/hbase-spark/dependency-info.html 
b/hbase-build-configuration/hbase-spark/dependency-info.html
index 9742b61..ea6e8d0 100644
--- a/hbase-build-configuration/hbase-spark/dependency-info.html
+++ b/hbase-build-configuration/hbase-spark/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Spark  Dependency Information
 
@@ -147,7 +147,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 



[14/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/dependency-info.html
--
diff --git a/hbase-build-configuration/dependency-info.html 
b/hbase-build-configuration/dependency-info.html
index 6583a46..45d3c10 100644
--- a/hbase-build-configuration/dependency-info.html
+++ b/hbase-build-configuration/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Build Configuration  Dependency 
Information
 
@@ -148,7 +148,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/dependency-management.html
--
diff --git a/hbase-build-configuration/dependency-management.html 
b/hbase-build-configuration/dependency-management.html
index 9b2b9d2..e2e6696 100644
--- a/hbase-build-configuration/dependency-management.html
+++ b/hbase-build-configuration/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Build Configuration  Project Dependency 
Management
 
@@ -775,18 +775,24 @@
 test-jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
+org.apache.hbase
+http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
+3.0.0-SNAPSHOT
+test-jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
 org.bouncycastle
 http://www.bouncycastle.org/java.html;>bcprov-jdk16
 1.46
 jar
 http://www.bouncycastle.org/licence.html;>Bouncy Castle 
Licence
-
+
 org.hamcrest
 https://github.com/hamcrest/JavaHamcrest/hamcrest-core;>hamcrest-core
 1.3
 jar
 http://www.opensource.org/licenses/bsd-license.php;>New BSD 
License
-
+
 org.mockito
 http://mockito.org;>mockito-core
 2.1.0
@@ -804,7 +810,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/dependencies.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/dependencies.html 
b/hbase-build-configuration/hbase-archetypes/dependencies.html
index 92e01bd..e45d3b3 100644
--- a/hbase-build-configuration/hbase-archetypes/dependencies.html
+++ b/hbase-build-configuration/hbase-archetypes/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetypes  Project Dependencies
 
@@ -330,7 +330,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 



[19/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
index 62bc799..5c004ce 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
@@ -250,7 +250,7 @@
 242Cell kv = cell;
 243// null input == user explicitly 
wants to flush
 244if (row == null  kv == 
null) {
-245  rollWriters();
+245  rollWriters(null);
 246  return;
 247}
 248
@@ -284,636 +284,642 @@
 276  configureStoragePolicy(conf, 
fs, tableAndFamily, writerPath);
 277}
 278
-279// If any of the HFiles for the 
column families has reached
-280// maxsize, we need to roll all 
the writers
-281if (wl != null  
wl.written + length = maxsize) {
-282  this.rollRequested = true;
-283}
-284
-285// This can only happen once a 
row is finished though
-286if (rollRequested  
Bytes.compareTo(this.previousRow, rowKey) != 0) {
-287  rollWriters();
-288}
-289
-290// create a new WAL writer, if 
necessary
-291if (wl == null || wl.writer == 
null) {
-292  if 
(conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {
-293HRegionLocation loc = null;
-294
-295String tableName = 
Bytes.toString(tableNameBytes);
-296if (tableName != null) {
-297  try (Connection connection 
= ConnectionFactory.createConnection(conf);
-298 RegionLocator 
locator =
-299   
connection.getRegionLocator(TableName.valueOf(tableName))) {
-300loc = 
locator.getRegionLocation(rowKey);
-301  } catch (Throwable e) {
-302LOG.warn("There's 
something wrong when locating rowkey: " +
-303  Bytes.toString(rowKey) 
+ " for tablename: " + tableName, e);
-304loc = null;
-305  } }
-306
-307if (null == loc) {
-308  if (LOG.isTraceEnabled()) 
{
-309LOG.trace("failed to get 
region location, so use default writer for rowkey: " +
-310  
Bytes.toString(rowKey));
-311  }
-312  wl = 
getNewWriter(tableNameBytes, family, conf, null);
-313} else {
-314  if (LOG.isDebugEnabled()) 
{
-315LOG.debug("first rowkey: 
[" + Bytes.toString(rowKey) + "]");
-316  }
-317  InetSocketAddress 
initialIsa =
-318  new 
InetSocketAddress(loc.getHostname(), loc.getPort());
-319  if 
(initialIsa.isUnresolved()) {
-320if (LOG.isTraceEnabled()) 
{
-321  LOG.trace("failed to 
resolve bind address: " + loc.getHostname() + ":"
-322  + loc.getPort() + 
", so use default writer");
-323}
-324wl = 
getNewWriter(tableNameBytes, family, conf, null);
-325  } else {
-326if (LOG.isDebugEnabled()) 
{
-327  LOG.debug("use favored 
nodes writer: " + initialIsa.getHostString());
-328}
-329wl = 
getNewWriter(tableNameBytes, family, conf, new InetSocketAddress[] { 
initialIsa
-330});
-331  }
-332}
-333  } else {
-334wl = 
getNewWriter(tableNameBytes, family, conf, null);
-335  }
-336}
-337
-338// we now have the proper WAL 
writer. full steam ahead
-339// TODO : Currently in 
SettableTimeStamp but this will also move to ExtendedCell
-340
PrivateCellUtil.updateLatestStamp(cell, this.now);
-341wl.writer.append(kv);
-342wl.written += length;
-343
-344// Copy the row so we know when a 
row transition.
-345this.previousRow = rowKey;
-346  }
-347
-348  private void rollWriters() throws 
IOException {
-349for (WriterLength wl : 
this.writers.values()) {
-350  if (wl.writer != null) {
-351LOG.info(
-352"Writer=" + 
wl.writer.getPath() + ((wl.written == 0)? "": ", wrote=" + wl.written));
-353close(wl.writer);
-354  }
-355  wl.writer = null;
-356  wl.written = 0;
-357}
-358this.rollRequested = false;
-359  }
-360
-361  /*
-362   * Create a new StoreFile.Writer.
-363   * @param family
-364   * @return A WriterLength, 
containing a new StoreFile.Writer.
-365   * @throws IOException
-366   */
-367  

[18/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/export_control.html
--
diff --git a/export_control.html b/export_control.html
index c3e1d56..e4942e0 100644
--- a/export_control.html
+++ b/export_control.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Export Control
@@ -336,7 +336,7 @@ for more details.
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-annotations/checkstyle.html
--
diff --git a/hbase-annotations/checkstyle.html 
b/hbase-annotations/checkstyle.html
index f1b188d..af35154 100644
--- a/hbase-annotations/checkstyle.html
+++ b/hbase-annotations/checkstyle.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Annotations  Checkstyle Results
 
@@ -178,7 +178,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-annotations/dependencies.html
--
diff --git a/hbase-annotations/dependencies.html 
b/hbase-annotations/dependencies.html
index faa7977..23b379a 100644
--- a/hbase-annotations/dependencies.html
+++ b/hbase-annotations/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Annotations  Project Dependencies
 
@@ -272,7 +272,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 



[38/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index c3bd096..52a5378 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 2007 - 2017 The Apache Software Foundation
 
-  File: 3448,
- Errors: 20747,
+  File: 3449,
+ Errors: 20698,
  Warnings: 0,
  Infos: 0
   
@@ -489,7 +489,7 @@ under the License.
   0
 
 
-  2
+  0
 
   
   
@@ -1105,7 +1105,7 @@ under the License.
   0
 
 
-  21
+  23
 
   
   
@@ -10331,7 +10331,7 @@ under the License.
   0
 
 
-  6
+  4
 
   
   
@@ -11204,6 +11204,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.HBaseZKTestingUtility.java;>org/apache/hadoop/hbase/HBaseZKTestingUtility.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.thrift.generated.IllegalArgument.java;>org/apache/hadoop/hbase/thrift/generated/IllegalArgument.java
 
 
@@ -11764,7 +11778,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.handler.CloseMetaHandler.java;>org/apache/hadoop/hbase/regionserver/handler/CloseMetaHandler.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.zookeeper.TestZKMainServer.java;>org/apache/hadoop/hbase/zookeeper/TestZKMainServer.java
 
 
   0
@@ -11778,7 +11792,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.zookeeper.TestZKMainServer.java;>org/apache/hadoop/hbase/zookeeper/TestZKMainServer.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.regionserver.handler.CloseMetaHandler.java;>org/apache/hadoop/hbase/regionserver/handler/CloseMetaHandler.java
 
 
   0
@@ -11787,7 +11801,7 @@ under the License.
   0
 
 
-  2
+  0
 
   
   
@@ -14041,7 +14055,7 @@ under the License.
   0
 
 
-  14
+  13
 
   
   
@@ -14284,7 +14298,7 @@ under the License.
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.http.resource.JerseyResource.java;>org/apache/hadoop/hbase/http/resource/JerseyResource.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.zookeeper.TestZKMulti.java;>org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
 
 
   0
@@ -14293,12 +14307,12 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
 
-  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.zookeeper.TestZKMulti.java;>org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.http.resource.JerseyResource.java;>org/apache/hadoop/hbase/http/resource/JerseyResource.java
 
 
   0
@@ -14307,7 +14321,7 @@ under the License.
   0
 
 
-  0
+  1
 
   
   
@@ -17597,7 +17611,7 @@ under the License.
   0
 
 
-  19
+  9
 
   
   
@@ -28965,7 +28979,7 @@ under the License.
   0
 
 
-  1
+ 

[36/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index 1ded7b5..def8d80 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Dependency Information
 
@@ -318,7 +318,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index b9e0096..5d8e46c 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependency Management
 
@@ -945,18 +945,24 @@
 test-jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
+org.apache.hbase
+http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
+3.0.0-SNAPSHOT
+test-jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
 org.bouncycastle
 http://www.bouncycastle.org/java.html;>bcprov-jdk16
 1.46
 jar
 http://www.bouncycastle.org/licence.html;>Bouncy Castle 
Licence
-
+
 org.hamcrest
 https://github.com/hamcrest/JavaHamcrest/hamcrest-core;>hamcrest-core
 1.3
 jar
 http://www.opensource.org/licenses/bsd-license.php;>New BSD 
License
-
+
 org.mockito
 http://mockito.org;>mockito-core
 2.1.0
@@ -974,7 +980,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/constant-values.html
--
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 42867bb..b98 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -3705,21 +3705,21 @@
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 date
-"Tue Dec  5 14:42:18 UTC 2017"
+"Wed Dec  6 14:42:16 UTC 2017"
 
 
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 revision
-"e29685ed6d144e6c33140bd8c9323101eb123e54"
+"ebd8841e0ee9ca1ab7b6dab55178761360b8d85a"
 
 
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 srcChecksum
-"5f0f4700b0461f81ca48e8b9acfdc7fa"
+"3a84e36c1447b2269eb4eb0e7f8272a2"
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index eb11eb0..4049048 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -32520,13 +32520,13 @@
 
 get()
 - Method in class org.apache.hadoop.hbase.client.HTableMultiplexer.AtomicAverageCounter
 
-get()
 - Static method in class org.apache.hadoop.hbase.client.PerClientRandomNonceGenerator
+get(byte[],
 byte[]) - Method in class org.apache.hadoop.hbase.client.Mutation
 
-Get the singleton nonce generator.
+Returns a list of all KeyValue objects with matching column 
family and qualifier.
 
-get(byte[], 
byte[]) - Method in class org.apache.hadoop.hbase.client.Put
+get()
 - Static method in class org.apache.hadoop.hbase.client.PerClientRandomNonceGenerator
 
-Returns a list of all KeyValue objects with matching column 
family and qualifier.
+Get the singleton nonce generator.
 
 get(Get)
 - Method in class org.apache.hadoop.hbase.client.RawAsyncTableImpl
 
@@ -51911,27 +51911,27 @@
 
 hard capacity limit
 
-has(byte[], 
byte[]) - Method in class org.apache.hadoop.hbase.client.Put
+has(byte[],
 byte[]) - Method in class org.apache.hadoop.hbase.client.Mutation
 
 A convenience method to determine if this object's 
familyMap contains
  a value assigned to the given family  qualifier.
 
-has(byte[],
 byte[], long) - Method in class org.apache.hadoop.hbase.client.Put
+has(byte[],
 byte[], long) - Method in class org.apache.hadoop.hbase.client.Mutation
 
 A convenience method to determine if this object's 
familyMap contains
  a value assigned to the given family, qualifier and timestamp.
 
-has(byte[],
 byte[], byte[]) - 

[23/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/RegionObserver.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
index 4c37cbe..cfb8ee4 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
@@ -228,837 +228,853 @@
 220   * of candidates. If you remove all the 
candidates then the compaction will be canceled.
 221   * pSupports Coprocessor 
'bypass' -- 'bypass' is how this method indicates that it changed
 222   * the passed in 
codecandidates/code.
-223   * @param c the environment provided by 
the region server
-224   * @param store the store where 
compaction is being requested
-225   * @param candidates the store files 
currently available for compaction
-226   * @param tracker tracker used to track 
the life cycle of a compaction
-227   */
-228  default void 
preCompactSelection(ObserverContextRegionCoprocessorEnvironment c, 
Store store,
-229  List? extends StoreFile 
candidates, CompactionLifeCycleTracker tracker)
-230  throws IOException {}
-231
-232  /**
-233   * Called after the {@link StoreFile}s 
to compact have been selected from the available
-234   * candidates.
-235   * @param c the environment provided by 
the region server
-236   * @param store the store being 
compacted
-237   * @param selected the store files 
selected to compact
-238   * @param tracker tracker used to track 
the life cycle of a compaction
-239   * @param request the requested 
compaction
-240   */
-241  default void 
postCompactSelection(ObserverContextRegionCoprocessorEnvironment c, 
Store store,
-242  List? extends StoreFile 
selected, CompactionLifeCycleTracker tracker,
-243  CompactionRequest request) {}
-244
-245  /**
-246   * Called before we open store scanner 
for compaction. You can use the {@code options} to change max
-247   * versions and TTL for the scanner 
being opened.
-248   * @param c the environment provided by 
the region server
-249   * @param store the store being 
compacted
-250   * @param scanType type of Scan
-251   * @param options used to change max 
versions and TTL for the scanner being opened
-252   * @param tracker tracker used to track 
the life cycle of a compaction
-253   * @param request the requested 
compaction
-254   */
-255  default void 
preCompactScannerOpen(ObserverContextRegionCoprocessorEnvironment c, 
Store store,
-256  ScanType scanType, ScanOptions 
options, CompactionLifeCycleTracker tracker,
-257  CompactionRequest request) throws 
IOException {}
-258
-259  /**
-260   * Called prior to writing the {@link 
StoreFile}s selected for compaction into a new
-261   * {@code StoreFile}.
-262   * p
-263   * To override or modify the compaction 
process, implementing classes can wrap the provided
-264   * {@link InternalScanner} with a 
custom implementation that is returned from this method. The
-265   * custom scanner can then inspect 
{@link org.apache.hadoop.hbase.Cell}s from the wrapped scanner,
-266   * applying its own policy to what gets 
written.
-267   * @param c the environment provided by 
the region server
-268   * @param store the store being 
compacted
-269   * @param scanner the scanner over 
existing data used in the store file rewriting
-270   * @param scanType type of Scan
-271   * @param tracker tracker used to track 
the life cycle of a compaction
-272   * @param request the requested 
compaction
-273   * @return the scanner to use during 
compaction. Should not be {@code null} unless the
-274   * implementation is writing 
new store files on its own.
-275   */
-276  default InternalScanner 
preCompact(ObserverContextRegionCoprocessorEnvironment c, Store 
store,
-277  InternalScanner scanner, ScanType 
scanType, CompactionLifeCycleTracker tracker,
-278  CompactionRequest request) throws 
IOException {
-279return scanner;
-280  }
-281
-282  /**
-283   * Called after compaction has 
completed and the new store file has been moved in to place.
-284   * @param c the environment provided by 
the region server
-285   * @param store the store being 
compacted
-286   * @param resultFile the new store file 
written out during compaction
-287   * @param tracker used to track the 
life cycle of a compaction
-288   * @param request the requested 
compaction
-289   */
-290  default void 
postCompact(ObserverContextRegionCoprocessorEnvironment c, Store 
store,
-291  StoreFile resultFile, 
CompactionLifeCycleTracker tracker, CompactionRequest request)
-292  throws IOException {}
-293
-294  /**
-295   * Called before the region is reported 
as closed to the master.
-296   * @param c the environment provided by 
the region server
-297   * @param 

[41/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html 
b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
index 62bc799..5c004ce 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
@@ -250,7 +250,7 @@
 242Cell kv = cell;
 243// null input == user explicitly 
wants to flush
 244if (row == null  kv == 
null) {
-245  rollWriters();
+245  rollWriters(null);
 246  return;
 247}
 248
@@ -284,636 +284,642 @@
 276  configureStoragePolicy(conf, 
fs, tableAndFamily, writerPath);
 277}
 278
-279// If any of the HFiles for the 
column families has reached
-280// maxsize, we need to roll all 
the writers
-281if (wl != null  
wl.written + length = maxsize) {
-282  this.rollRequested = true;
-283}
-284
-285// This can only happen once a 
row is finished though
-286if (rollRequested  
Bytes.compareTo(this.previousRow, rowKey) != 0) {
-287  rollWriters();
-288}
-289
-290// create a new WAL writer, if 
necessary
-291if (wl == null || wl.writer == 
null) {
-292  if 
(conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {
-293HRegionLocation loc = null;
-294
-295String tableName = 
Bytes.toString(tableNameBytes);
-296if (tableName != null) {
-297  try (Connection connection 
= ConnectionFactory.createConnection(conf);
-298 RegionLocator 
locator =
-299   
connection.getRegionLocator(TableName.valueOf(tableName))) {
-300loc = 
locator.getRegionLocation(rowKey);
-301  } catch (Throwable e) {
-302LOG.warn("There's 
something wrong when locating rowkey: " +
-303  Bytes.toString(rowKey) 
+ " for tablename: " + tableName, e);
-304loc = null;
-305  } }
-306
-307if (null == loc) {
-308  if (LOG.isTraceEnabled()) 
{
-309LOG.trace("failed to get 
region location, so use default writer for rowkey: " +
-310  
Bytes.toString(rowKey));
-311  }
-312  wl = 
getNewWriter(tableNameBytes, family, conf, null);
-313} else {
-314  if (LOG.isDebugEnabled()) 
{
-315LOG.debug("first rowkey: 
[" + Bytes.toString(rowKey) + "]");
-316  }
-317  InetSocketAddress 
initialIsa =
-318  new 
InetSocketAddress(loc.getHostname(), loc.getPort());
-319  if 
(initialIsa.isUnresolved()) {
-320if (LOG.isTraceEnabled()) 
{
-321  LOG.trace("failed to 
resolve bind address: " + loc.getHostname() + ":"
-322  + loc.getPort() + 
", so use default writer");
-323}
-324wl = 
getNewWriter(tableNameBytes, family, conf, null);
-325  } else {
-326if (LOG.isDebugEnabled()) 
{
-327  LOG.debug("use favored 
nodes writer: " + initialIsa.getHostString());
-328}
-329wl = 
getNewWriter(tableNameBytes, family, conf, new InetSocketAddress[] { 
initialIsa
-330});
-331  }
-332}
-333  } else {
-334wl = 
getNewWriter(tableNameBytes, family, conf, null);
-335  }
-336}
-337
-338// we now have the proper WAL 
writer. full steam ahead
-339// TODO : Currently in 
SettableTimeStamp but this will also move to ExtendedCell
-340
PrivateCellUtil.updateLatestStamp(cell, this.now);
-341wl.writer.append(kv);
-342wl.written += length;
-343
-344// Copy the row so we know when a 
row transition.
-345this.previousRow = rowKey;
-346  }
-347
-348  private void rollWriters() throws 
IOException {
-349for (WriterLength wl : 
this.writers.values()) {
-350  if (wl.writer != null) {
-351LOG.info(
-352"Writer=" + 
wl.writer.getPath() + ((wl.written == 0)? "": ", wrote=" + wl.written));
-353close(wl.writer);
-354  }
-355  wl.writer = null;
-356  wl.written = 0;
-357}
-358this.rollRequested = false;
-359  }
-360
-361  /*
-362   * Create a new StoreFile.Writer.
-363   * @param family
-364   * @return A WriterLength, 
containing a new StoreFile.Writer.
-365   * @throws IOException
-366   */
-367  
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="BX_UNBOXING_IMMEDIATELY_REBOXED",
-368  

[24/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
index 4c37cbe..cfb8ee4 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/RegionObserver.MutationType.html
@@ -228,837 +228,853 @@
 220   * of candidates. If you remove all the 
candidates then the compaction will be canceled.
 221   * pSupports Coprocessor 
'bypass' -- 'bypass' is how this method indicates that it changed
 222   * the passed in 
codecandidates/code.
-223   * @param c the environment provided by 
the region server
-224   * @param store the store where 
compaction is being requested
-225   * @param candidates the store files 
currently available for compaction
-226   * @param tracker tracker used to track 
the life cycle of a compaction
-227   */
-228  default void 
preCompactSelection(ObserverContextRegionCoprocessorEnvironment c, 
Store store,
-229  List? extends StoreFile 
candidates, CompactionLifeCycleTracker tracker)
-230  throws IOException {}
-231
-232  /**
-233   * Called after the {@link StoreFile}s 
to compact have been selected from the available
-234   * candidates.
-235   * @param c the environment provided by 
the region server
-236   * @param store the store being 
compacted
-237   * @param selected the store files 
selected to compact
-238   * @param tracker tracker used to track 
the life cycle of a compaction
-239   * @param request the requested 
compaction
-240   */
-241  default void 
postCompactSelection(ObserverContextRegionCoprocessorEnvironment c, 
Store store,
-242  List? extends StoreFile 
selected, CompactionLifeCycleTracker tracker,
-243  CompactionRequest request) {}
-244
-245  /**
-246   * Called before we open store scanner 
for compaction. You can use the {@code options} to change max
-247   * versions and TTL for the scanner 
being opened.
-248   * @param c the environment provided by 
the region server
-249   * @param store the store being 
compacted
-250   * @param scanType type of Scan
-251   * @param options used to change max 
versions and TTL for the scanner being opened
-252   * @param tracker tracker used to track 
the life cycle of a compaction
-253   * @param request the requested 
compaction
-254   */
-255  default void 
preCompactScannerOpen(ObserverContextRegionCoprocessorEnvironment c, 
Store store,
-256  ScanType scanType, ScanOptions 
options, CompactionLifeCycleTracker tracker,
-257  CompactionRequest request) throws 
IOException {}
-258
-259  /**
-260   * Called prior to writing the {@link 
StoreFile}s selected for compaction into a new
-261   * {@code StoreFile}.
-262   * p
-263   * To override or modify the compaction 
process, implementing classes can wrap the provided
-264   * {@link InternalScanner} with a 
custom implementation that is returned from this method. The
-265   * custom scanner can then inspect 
{@link org.apache.hadoop.hbase.Cell}s from the wrapped scanner,
-266   * applying its own policy to what gets 
written.
-267   * @param c the environment provided by 
the region server
-268   * @param store the store being 
compacted
-269   * @param scanner the scanner over 
existing data used in the store file rewriting
-270   * @param scanType type of Scan
-271   * @param tracker tracker used to track 
the life cycle of a compaction
-272   * @param request the requested 
compaction
-273   * @return the scanner to use during 
compaction. Should not be {@code null} unless the
-274   * implementation is writing 
new store files on its own.
-275   */
-276  default InternalScanner 
preCompact(ObserverContextRegionCoprocessorEnvironment c, Store 
store,
-277  InternalScanner scanner, ScanType 
scanType, CompactionLifeCycleTracker tracker,
-278  CompactionRequest request) throws 
IOException {
-279return scanner;
-280  }
-281
-282  /**
-283   * Called after compaction has 
completed and the new store file has been moved in to place.
-284   * @param c the environment provided by 
the region server
-285   * @param store the store being 
compacted
-286   * @param resultFile the new store file 
written out during compaction
-287   * @param tracker used to track the 
life cycle of a compaction
-288   * @param request the requested 
compaction
-289   */
-290  default void 
postCompact(ObserverContextRegionCoprocessorEnvironment c, Store 
store,
-291  StoreFile resultFile, 
CompactionLifeCycleTracker tracker, CompactionRequest request)
-292  throws IOException {}
-293
-294  /**
-295   * Called before the region is reported 
as closed to the master.
-296   * @param 

[42/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/apidocs/src-html/org/apache/hadoop/hbase/client/Put.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Put.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Put.html
index 7d3d133..75a155d 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Put.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Put.html
@@ -35,487 +35,355 @@
 027import java.util.NavigableMap;
 028import java.util.TreeMap;
 029import java.util.UUID;
-030
-031import org.apache.hadoop.hbase.Cell;
-032import 
org.apache.hadoop.hbase.CellUtil;
-033import 
org.apache.hadoop.hbase.HConstants;
+030import org.apache.hadoop.hbase.Cell;
+031import 
org.apache.hadoop.hbase.CellUtil;
+032import 
org.apache.hadoop.hbase.HConstants;
+033import 
org.apache.hadoop.hbase.IndividualBytesFieldCell;
 034import 
org.apache.hadoop.hbase.KeyValue;
-035import 
org.apache.hadoop.hbase.IndividualBytesFieldCell;
-036import org.apache.hadoop.hbase.Tag;
-037import 
org.apache.yetus.audience.InterfaceAudience;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.security.access.Permission;
-040import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
-041import 
org.apache.hadoop.hbase.util.Bytes;
-042
-043/**
-044 * Used to perform Put operations for a 
single row.
-045 * p
-046 * To perform a Put, instantiate a Put 
object with the row to insert to, and
-047 * for each column to be inserted, 
execute {@link #addColumn(byte[], byte[],
-048 * byte[]) add} or {@link 
#addColumn(byte[], byte[], long, byte[]) add} if
-049 * setting the timestamp.
-050 */
-051@InterfaceAudience.Public
-052public class Put extends Mutation 
implements HeapSize, ComparableRow {
-053  /**
-054   * Create a Put operation for the 
specified row.
-055   * @param row row key
-056   */
-057  public Put(byte [] row) {
-058this(row, 
HConstants.LATEST_TIMESTAMP);
-059  }
-060
-061  /**
-062   * Create a Put operation for the 
specified row, using a given timestamp.
-063   *
-064   * @param row row key; we make a copy 
of what we are passed to keep local.
-065   * @param ts timestamp
-066   */
-067  public Put(byte[] row, long ts) {
-068this(row, 0, row.length, ts);
-069  }
-070
-071  /**
-072   * We make a copy of the passed in row 
key to keep local.
-073   * @param rowArray
-074   * @param rowOffset
-075   * @param rowLength
-076   */
-077  public Put(byte [] rowArray, int 
rowOffset, int rowLength) {
-078this(rowArray, rowOffset, rowLength, 
HConstants.LATEST_TIMESTAMP);
-079  }
-080
-081  /**
-082   * @param row row key; we make a copy 
of what we are passed to keep local.
-083   * @param ts  timestamp
-084   */
-085  public Put(ByteBuffer row, long ts) {
-086if (ts  0) {
-087  throw new 
IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
-088}
-089checkRow(row);
-090this.row = new 
byte[row.remaining()];
-091row.get(this.row);
-092this.ts = ts;
-093  }
-094
-095  /**
-096   * @param row row key; we make a copy 
of what we are passed to keep local.
-097   */
-098  public Put(ByteBuffer row) {
-099this(row, 
HConstants.LATEST_TIMESTAMP);
-100  }
-101
-102  /**
-103   * We make a copy of the passed in row 
key to keep local.
-104   * @param rowArray
-105   * @param rowOffset
-106   * @param rowLength
-107   * @param ts
-108   */
-109  public Put(byte [] rowArray, int 
rowOffset, int rowLength, long ts) {
-110checkRow(rowArray, rowOffset, 
rowLength);
-111this.row = Bytes.copy(rowArray, 
rowOffset, rowLength);
-112this.ts = ts;
-113if (ts  0) {
-114  throw new 
IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
-115}
-116  }
-117
-118  /**
-119   * Create a Put operation for an 
immutable row key.
-120   *
-121   * @param row row key
-122   * @param rowIsImmutable whether the 
input row is immutable.
-123   *   Set to true if 
the caller can guarantee that
-124   *   the row will 
not be changed for the Put duration.
-125   */
-126  public Put(byte [] row, boolean 
rowIsImmutable) {
-127this(row, 
HConstants.LATEST_TIMESTAMP, rowIsImmutable);
-128  }
-129
-130  /**
-131   * Create a Put operation for an 
immutable row key, using a given timestamp.
-132   *
-133   * @param row row key
-134   * @param ts timestamp
-135   * @param rowIsImmutable whether the 
input row is immutable.
-136   *   Set to true if 
the caller can guarantee that
-137   *   the row will 
not be changed for the Put duration.
-138   */
-139  public Put(byte[] row, long ts, boolean 
rowIsImmutable) {
-140// Check and set timestamp
-141if (ts  0) {
-142  throw new 
IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
-143}
-144this.ts = ts;
-145
-146// Deal with row according to 
rowIsImmutable
-147 

[26/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/client/Put.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/Put.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/Put.html
index 7d3d133..75a155d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/Put.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/Put.html
@@ -35,487 +35,355 @@
 027import java.util.NavigableMap;
 028import java.util.TreeMap;
 029import java.util.UUID;
-030
-031import org.apache.hadoop.hbase.Cell;
-032import 
org.apache.hadoop.hbase.CellUtil;
-033import 
org.apache.hadoop.hbase.HConstants;
+030import org.apache.hadoop.hbase.Cell;
+031import 
org.apache.hadoop.hbase.CellUtil;
+032import 
org.apache.hadoop.hbase.HConstants;
+033import 
org.apache.hadoop.hbase.IndividualBytesFieldCell;
 034import 
org.apache.hadoop.hbase.KeyValue;
-035import 
org.apache.hadoop.hbase.IndividualBytesFieldCell;
-036import org.apache.hadoop.hbase.Tag;
-037import 
org.apache.yetus.audience.InterfaceAudience;
-038import 
org.apache.hadoop.hbase.io.HeapSize;
-039import 
org.apache.hadoop.hbase.security.access.Permission;
-040import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
-041import 
org.apache.hadoop.hbase.util.Bytes;
-042
-043/**
-044 * Used to perform Put operations for a 
single row.
-045 * p
-046 * To perform a Put, instantiate a Put 
object with the row to insert to, and
-047 * for each column to be inserted, 
execute {@link #addColumn(byte[], byte[],
-048 * byte[]) add} or {@link 
#addColumn(byte[], byte[], long, byte[]) add} if
-049 * setting the timestamp.
-050 */
-051@InterfaceAudience.Public
-052public class Put extends Mutation 
implements HeapSize, ComparableRow {
-053  /**
-054   * Create a Put operation for the 
specified row.
-055   * @param row row key
-056   */
-057  public Put(byte [] row) {
-058this(row, 
HConstants.LATEST_TIMESTAMP);
-059  }
-060
-061  /**
-062   * Create a Put operation for the 
specified row, using a given timestamp.
-063   *
-064   * @param row row key; we make a copy 
of what we are passed to keep local.
-065   * @param ts timestamp
-066   */
-067  public Put(byte[] row, long ts) {
-068this(row, 0, row.length, ts);
-069  }
-070
-071  /**
-072   * We make a copy of the passed in row 
key to keep local.
-073   * @param rowArray
-074   * @param rowOffset
-075   * @param rowLength
-076   */
-077  public Put(byte [] rowArray, int 
rowOffset, int rowLength) {
-078this(rowArray, rowOffset, rowLength, 
HConstants.LATEST_TIMESTAMP);
-079  }
-080
-081  /**
-082   * @param row row key; we make a copy 
of what we are passed to keep local.
-083   * @param ts  timestamp
-084   */
-085  public Put(ByteBuffer row, long ts) {
-086if (ts  0) {
-087  throw new 
IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
-088}
-089checkRow(row);
-090this.row = new 
byte[row.remaining()];
-091row.get(this.row);
-092this.ts = ts;
-093  }
-094
-095  /**
-096   * @param row row key; we make a copy 
of what we are passed to keep local.
-097   */
-098  public Put(ByteBuffer row) {
-099this(row, 
HConstants.LATEST_TIMESTAMP);
-100  }
-101
-102  /**
-103   * We make a copy of the passed in row 
key to keep local.
-104   * @param rowArray
-105   * @param rowOffset
-106   * @param rowLength
-107   * @param ts
-108   */
-109  public Put(byte [] rowArray, int 
rowOffset, int rowLength, long ts) {
-110checkRow(rowArray, rowOffset, 
rowLength);
-111this.row = Bytes.copy(rowArray, 
rowOffset, rowLength);
-112this.ts = ts;
-113if (ts  0) {
-114  throw new 
IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
-115}
-116  }
-117
-118  /**
-119   * Create a Put operation for an 
immutable row key.
-120   *
-121   * @param row row key
-122   * @param rowIsImmutable whether the 
input row is immutable.
-123   *   Set to true if 
the caller can guarantee that
-124   *   the row will 
not be changed for the Put duration.
-125   */
-126  public Put(byte [] row, boolean 
rowIsImmutable) {
-127this(row, 
HConstants.LATEST_TIMESTAMP, rowIsImmutable);
-128  }
-129
-130  /**
-131   * Create a Put operation for an 
immutable row key, using a given timestamp.
-132   *
-133   * @param row row key
-134   * @param ts timestamp
-135   * @param rowIsImmutable whether the 
input row is immutable.
-136   *   Set to true if 
the caller can guarantee that
-137   *   the row will 
not be changed for the Put duration.
-138   */
-139  public Put(byte[] row, long ts, boolean 
rowIsImmutable) {
-140// Check and set timestamp
-141if (ts  0) {
-142  throw new 
IllegalArgumentException("Timestamp cannot be negative. ts=" + ts);
-143}
-144this.ts = ts;
-145
-146// Deal with row according to 

[49/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/apidocs/index-all.html
--
diff --git a/apidocs/index-all.html b/apidocs/index-all.html
index f491795..aa5c231 100644
--- a/apidocs/index-all.html
+++ b/apidocs/index-all.html
@@ -5689,7 +5689,7 @@
 
 Create a Get operation for the specified row.
 
-get(byte[], 
byte[]) - Method in class org.apache.hadoop.hbase.client.Put
+get(byte[],
 byte[]) - Method in class org.apache.hadoop.hbase.client.Mutation
 
 Returns a list of all KeyValue objects with matching column 
family and qualifier.
 
@@ -8625,26 +8625,28 @@
 
 ASCII code for 'H'
 
-has(byte[], 
byte[]) - Method in class org.apache.hadoop.hbase.client.Put
+has(byte[],
 byte[]) - Method in class org.apache.hadoop.hbase.client.Mutation
 
 A convenience method to determine if this object's 
familyMap contains
  a value assigned to the given family  qualifier.
 
-has(byte[],
 byte[], long) - Method in class org.apache.hadoop.hbase.client.Put
+has(byte[],
 byte[], long) - Method in class org.apache.hadoop.hbase.client.Mutation
 
 A convenience method to determine if this object's 
familyMap contains
  a value assigned to the given family, qualifier and timestamp.
 
-has(byte[],
 byte[], byte[]) - Method in class org.apache.hadoop.hbase.client.Put
+has(byte[],
 byte[], byte[]) - Method in class org.apache.hadoop.hbase.client.Mutation
 
 A convenience method to determine if this object's 
familyMap contains
  a value assigned to the given family, qualifier and timestamp.
 
-has(byte[],
 byte[], long, byte[]) - Method in class 
org.apache.hadoop.hbase.client.Put
+has(byte[],
 byte[], long, byte[]) - Method in class 
org.apache.hadoop.hbase.client.Mutation
 
 A convenience method to determine if this object's 
familyMap contains
  the given value assigned to the given family, qualifier and timestamp.
 
+has(byte[],
 byte[], long, byte[], boolean, boolean) - Method in class 
org.apache.hadoop.hbase.client.Mutation
+
 HAS_LARGE_RESULT
 - Static variable in class org.apache.hadoop.hbase.mapreduce.Import
 
 hasBody()
 - Method in class org.apache.hadoop.hbase.rest.client.Response
@@ -16175,14 +16177,22 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
 
 
+setTimestamp(long)
 - Method in class org.apache.hadoop.hbase.client.Append
+
 setTimestamp(long)
 - Method in class org.apache.hadoop.hbase.client.Delete
-
-Set the timestamp of the delete.
-
+
 setTimeStamp(long)
 - Method in class org.apache.hadoop.hbase.client.Get
 
 Get versions of columns with the specified timestamp.
 
+setTimestamp(long)
 - Method in class org.apache.hadoop.hbase.client.Increment
+
+setTimestamp(long)
 - Method in class org.apache.hadoop.hbase.client.Mutation
+
+Set the timestamp of the delete.
+
+setTimestamp(long)
 - Method in class org.apache.hadoop.hbase.client.Put
+
 setTimeStamp(long)
 - Method in class org.apache.hadoop.hbase.client.Scan
 
 Get versions of columns with the specified timestamp.
@@ -17838,8 +17848,6 @@ Input/OutputFormats, a table indexing MapReduce job, 
and utility methods.
 As of release 2.0.0, this 
will be removed in HBase 3.0.0.
 
 
-toMap(int)
 - Method in class org.apache.hadoop.hbase.client.Delete
-
 toMap(int) 
- Method in class org.apache.hadoop.hbase.client.Get
 
 Compile the details beyond the scope of getFingerprint 
(row, columns,

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html 
b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
index 5fbfece..d758d09 100644
--- a/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
+++ b/apidocs/org/apache/hadoop/hbase/class-use/Cell.html
@@ -1077,7 +1077,7 @@ Input/OutputFormats, a table indexing MapReduce job, and 
utility methods.
 
 
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCell
-Put.get(byte[]family,
+Mutation.get(byte[]family,
byte[]qualifier)
 Returns a list of all KeyValue objects with matching column 
family and qualifier.
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/apidocs/org/apache/hadoop/hbase/client/Append.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Append.html 
b/apidocs/org/apache/hadoop/hbase/client/Append.html
index 14799fe..f18d6ba 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Append.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Append.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":42,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10};
+var methods = 

[30/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html 
b/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
index 19cade2..c6f511e 100644
--- a/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
+++ b/devapidocs/org/apache/hadoop/hbase/security/access/AccessController.html
@@ -3286,7 +3286,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client performs a Get
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
 
 Specified by:
 preGetOpin
 interfaceRegionObserver
@@ -3314,7 +3315,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client tests for existence using a Get.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
 
 Specified by:
 preExistsin
 interfaceRegionObserver
@@ -3343,7 +3345,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client stores a value.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells in 'put' beyond the life of this 
invocation.
  If need a Cell reference for later use, copy the cell and use that.
@@ -3400,7 +3403,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client deletes a value.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells in 'delete' beyond the life of 
this invocation.
  If need a Cell reference for later use, copy the cell and use that.
@@ -3493,7 +3497,8 @@ implements Description copied from 
interface:RegionObserver
 Called before checkAndPut.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells in 'put' beyond the life of this 
invocation.
  If need a Cell reference for later use, copy the cell and use that.
@@ -3538,7 +3543,8 @@ implements Description copied from 
interface:RegionObserver
 Called before checkAndDelete.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells in 'delete' beyond the life of 
this invocation.
  If need a Cell reference for later use, copy the cell and use that.
@@ -3623,7 +3630,8 @@ implements Description copied from 
interface:RegionObserver
 Called before Append.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells in 'append' beyond the life of 
this invocation.
  If need a Cell reference for later use, copy the cell and use that.
@@ -3690,7 +3699,8 @@ implements Description copied from 
interface:RegionObserver
 Called before Increment.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells in 'increment' beyond the life of 
this invocation.
  If need a Cell reference for later use, copy the cell and use that.
@@ -3752,7 +3763,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client asks for the next row on a scanner.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells returned by scanner, beyond the 
life of this
  invocation. If need a Cell reference for later use, copy the cell and use 
that.
@@ -3906,7 +3919,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client closes a scanner.
  
- Call 

[16/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-annotations/dependency-info.html
--
diff --git a/hbase-annotations/dependency-info.html 
b/hbase-annotations/dependency-info.html
index cd557dd..73b04b8 100644
--- a/hbase-annotations/dependency-info.html
+++ b/hbase-annotations/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Annotations  Dependency Information
 
@@ -147,7 +147,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-annotations/dependency-management.html
--
diff --git a/hbase-annotations/dependency-management.html 
b/hbase-annotations/dependency-management.html
index 2ce3384..2ac5ed6 100644
--- a/hbase-annotations/dependency-management.html
+++ b/hbase-annotations/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Annotations  Project Dependency 
Management
 
@@ -775,18 +775,24 @@
 test-jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
+org.apache.hbase
+http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
+3.0.0-SNAPSHOT
+test-jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
 org.bouncycastle
 http://www.bouncycastle.org/java.html;>bcprov-jdk16
 1.46
 jar
 http://www.bouncycastle.org/licence.html;>Bouncy Castle 
Licence
-
+
 org.hamcrest
 https://github.com/hamcrest/JavaHamcrest/hamcrest-core;>hamcrest-core
 1.3
 jar
 http://www.opensource.org/licenses/bsd-license.php;>New BSD 
License
-
+
 org.mockito
 http://mockito.org;>mockito-core
 2.1.0
@@ -804,7 +810,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-annotations/index.html
--
diff --git a/hbase-annotations/index.html b/hbase-annotations/index.html
index 3f5ad69..e6e2d3b 100644
--- a/hbase-annotations/index.html
+++ b/hbase-annotations/index.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Annotations  About
 
@@ -119,7 +119,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-annotations/integration.html
--
diff --git a/hbase-annotations/integration.html 
b/hbase-annotations/integration.html
index ea953cf..65ed760 100644
--- a/hbase-annotations/integration.html
+++ b/hbase-annotations/integration.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Annotations  CI Management
 
@@ -126,7 +126,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-annotations/issue-tracking.html
--
diff --git a/hbase-annotations/issue-tracking.html 
b/hbase-annotations/issue-tracking.html
index d02e8d8..443efdd 100644
--- a/hbase-annotations/issue-tracking.html
+++ b/hbase-annotations/issue-tracking.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Annotations  Issue Management
 
@@ -123,7 +123,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-annotations/license.html
--
diff --git a/hbase-annotations/license.html b/hbase-annotations/license.html
index 23d4722..c722bfd 100644
--- a/hbase-annotations/license.html
+++ b/hbase-annotations/license.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 

[17/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-annotations/dependency-convergence.html
--
diff --git a/hbase-annotations/dependency-convergence.html 
b/hbase-annotations/dependency-convergence.html
index ecbd0c9..f699666 100644
--- a/hbase-annotations/dependency-convergence.html
+++ b/hbase-annotations/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Annotations  Reactor Dependency 
Convergence
 
@@ -488,22 +488,22 @@
 3.4.10
 
 
-org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.zookeeper:zookeeper:jar:3.4.10:compile|+-org.apache.hadoop:hadoop-common:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.had
 oop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAP
 SHOT:test|+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-minicluster:jar:2.7.4:test|+-org.apache.hadoop:hadoop-common:test-jar:tests:2.7.4:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.7.4:test|\-org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.7.4:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for dupli
 cate)+-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)\-org.apache.hbase:hbase-rsgroup:jar:3.0.0-SNAPSHOT:compile\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile 
- version managed from 3.4.6
 ; omitted for duplicate)
-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for dup
 licate)|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for duplicat
 e)+-org.apache.hbase:hbase-mapreduce:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 

[33/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/org/apache/hadoop/hbase/client/Put.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Put.html 
b/devapidocs/org/apache/hadoop/hbase/client/Put.html
index e3cc37b..96340df 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Put.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Put.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -129,7 +129,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class Put
+public class Put
 extends Mutation
 implements HeapSize, http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableRow
 Used to perform Put operations for a single row.
@@ -332,104 +332,58 @@ implements 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCell
-get(byte[]family,
-   byte[]qualifier)
-Returns a list of all KeyValue objects with matching column 
family and qualifier.
-
-
-
-boolean
-has(byte[]family,
-   byte[]qualifier)
-A convenience method to determine if this object's 
familyMap contains
- a value assigned to the given family  qualifier.
-
-
-
-boolean
-has(byte[]family,
-   byte[]qualifier,
-   byte[]value)
-A convenience method to determine if this object's 
familyMap contains
- a value assigned to the given family, qualifier and timestamp.
-
-
-
-boolean
-has(byte[]family,
-   byte[]qualifier,
-   longts)
-A convenience method to determine if this object's 
familyMap contains
- a value assigned to the given family, qualifier and timestamp.
-
-
-
-boolean
-has(byte[]family,
-   byte[]qualifier,
-   longts,
-   byte[]value)
-A convenience method to determine if this object's 
familyMap contains
- the given value assigned to the given family, qualifier and timestamp.
-
-
-
-private boolean
-has(byte[]family,
-   byte[]qualifier,
-   longts,
-   byte[]value,
-   booleanignoreTS,
-   booleanignoreValue)
-
-
 Put
 setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,Permissionperms)
 
-
+
 Put
 setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringuser,
   Permissionperms)
 
-
+
 Put
 setAttribute(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
 byte[]value)
 Sets an attribute.
 
 
-
+
 Put
 setCellVisibility(CellVisibilityexpression)
 Sets the visibility expression associated with cells in 
this Mutation.
 
 
-
+
 Put
 setClusterIds(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/UUID.html?is-external=true;
 title="class or interface in java.util">UUIDclusterIds)
 Marks that the clusters with the given clusterIds have 
consumed the mutation
 
 
-
+
 Put
 setDurability(Durabilityd)
 Set the durability for this mutation
 
 
-
+
 Put
 setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 Method for setting the put's familyMap
 
 
-
+
 Put
 setId(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringid)
 This method allows you to set an identifier on an 
operation.
 
 
-
+
+Put
+setTimestamp(longtimestamp)
+Set the timestamp of the delete.
+
+
+
 Put
 setTTL(longttl)
 Set the TTL desired for the result of the mutation, in 
milliseconds.
@@ -441,7 +395,7 @@ implements 
 
 Methods inherited from classorg.apache.hadoop.hbase.client.Mutation
-cellScanner,
 checkRow,
 checkRow,
 checkRow,
 compareTo,
 createPutKeyValue,
 createPutKeyValue,
 createPutKeyValue, extraHeapSize,
 getACL,
 getCellList,
 getCellVisibility,
 getClusterIds,
 getDurability,
 getFamilyCellMap,
 

[28/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/client/Increment.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/Increment.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/Increment.html
index 312947a..b283abc 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/Increment.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/Increment.html
@@ -32,317 +32,322 @@
 024import java.util.NavigableMap;
 025import java.util.TreeMap;
 026import java.util.UUID;
-027
-028import org.apache.hadoop.hbase.Cell;
-029import 
org.apache.hadoop.hbase.CellUtil;
-030import 
org.apache.hadoop.hbase.KeyValue;
-031import 
org.apache.yetus.audience.InterfaceAudience;
-032import 
org.apache.hadoop.hbase.io.TimeRange;
-033import 
org.apache.hadoop.hbase.security.access.Permission;
-034import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
-035import 
org.apache.hadoop.hbase.util.Bytes;
-036import 
org.apache.hadoop.hbase.util.ClassSize;
-037
-038/**
-039 * Used to perform Increment operations 
on a single row.
-040 * p
-041 * This operation ensures atomicity to 
readers. Increments are done
-042 * under a single row lock, so write 
operations to a row are synchronized, and
-043 * readers are guaranteed to see this 
operation fully completed.
-044 * p
-045 * To increment columns of a row, 
instantiate an Increment object with the row
-046 * to increment.  At least one column to 
increment must be specified using the
-047 * {@link #addColumn(byte[], byte[], 
long)} method.
-048 */
-049@InterfaceAudience.Public
-050public class Increment extends Mutation 
implements ComparableRow {
-051  private static final long HEAP_OVERHEAD 
=  ClassSize.REFERENCE + ClassSize.TIMERANGE;
-052  private TimeRange tr = new 
TimeRange();
-053
-054  /**
-055   * Create a Increment operation for the 
specified row.
-056   * p
-057   * At least one column must be 
incremented.
-058   * @param row row key (we will make a 
copy of this).
-059   */
-060  public Increment(byte [] row) {
-061this(row, 0, row.length);
-062  }
-063
-064  /**
-065   * Create a Increment operation for the 
specified row.
-066   * p
-067   * At least one column must be 
incremented.
-068   * @param row row key (we will make a 
copy of this).
-069   */
-070  public Increment(final byte [] row, 
final int offset, final int length) {
-071checkRow(row, offset, length);
-072this.row = Bytes.copy(row, offset, 
length);
-073  }
-074  /**
-075   * Copy constructor
-076   * @param i
-077   */
-078  public Increment(Increment i) {
-079this.row = i.getRow();
-080this.ts = i.getTimeStamp();
-081this.tr = i.getTimeRange();
-082
this.familyMap.putAll(i.getFamilyCellMap());
-083for (Map.EntryString, byte[] 
entry : i.getAttributesMap().entrySet()) {
-084  this.setAttribute(entry.getKey(), 
entry.getValue());
-085}
-086super.setPriority(i.getPriority());
-087  }
-088
-089  /**
-090   * Add the specified KeyValue to this 
operation.
-091   * @param cell individual Cell
-092   * @return this
-093   * @throws java.io.IOException e
-094   */
-095  public Increment add(Cell cell) throws 
IOException{
-096byte [] family = 
CellUtil.cloneFamily(cell);
-097ListCell list = 
getCellList(family);
-098//Checking that the row of the kv is 
the same as the put
-099if (!CellUtil.matchingRows(cell, 
this.row)) {
-100  throw new WrongRowIOException("The 
row in " + cell +
-101" doesn't match the original one 
" +  Bytes.toStringBinary(this.row));
-102}
-103list.add(cell);
-104return this;
-105  }
-106
-107  /**
-108   * Increment the column from the 
specific family with the specified qualifier
-109   * by the specified amount.
-110   * p
-111   * Overrides previous calls to 
addColumn for this family and qualifier.
-112   * @param family family name
-113   * @param qualifier column qualifier
-114   * @param amount amount to increment 
by
-115   * @return the Increment object
-116   */
-117  public Increment addColumn(byte [] 
family, byte [] qualifier, long amount) {
-118if (family == null) {
-119  throw new 
IllegalArgumentException("family cannot be null");
-120}
-121ListCell list = 
getCellList(family);
-122KeyValue kv = 
createPutKeyValue(family, qualifier, ts, Bytes.toBytes(amount));
-123list.add(kv);
-124return this;
-125  }
-126
-127  /**
-128   * Gets the TimeRange used for this 
increment.
-129   * @return TimeRange
-130   */
-131  public TimeRange getTimeRange() {
-132return this.tr;
-133  }
-134
-135  /**
-136   * Sets the TimeRange to be used on the 
Get for this increment.
-137   * p
-138   * This is useful for when you have 
counters that only last for specific
-139   * periods of time (ie. counters that 
are partitioned by time).  By setting
-140   * the range of valid times for this 
increment, 

[32/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
index d615cc5..c25b53c 100644
--- a/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
+++ b/devapidocs/org/apache/hadoop/hbase/coprocessor/RegionObserver.html
@@ -1007,7 +1007,7 @@ default
 
 preCompactSelection
-defaultvoidpreCompactSelection(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpreCompactSelection(ObserverContextRegionCoprocessorEnvironmentc,
  Storestore,
  http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends StoreFilecandidates,
  CompactionLifeCycleTrackertracker)
@@ -1016,7 +1016,8 @@ defaultParameters:
 c - the environment provided by the region server
@@ -1034,7 +1035,7 @@ default
 
 postCompactSelection
-defaultvoidpostCompactSelection(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpostCompactSelection(ObserverContextRegionCoprocessorEnvironmentc,
   Storestore,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends StoreFileselected,
   CompactionLifeCycleTrackertracker,
@@ -1057,7 +1058,7 @@ default
 
 preCompactScannerOpen
-defaultvoidpreCompactScannerOpen(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpreCompactScannerOpen(ObserverContextRegionCoprocessorEnvironmentc,
Storestore,
ScanTypescanType,
ScanOptionsoptions,
@@ -1085,7 +1086,7 @@ default
 
 preCompact
-defaultInternalScannerpreCompact(ObserverContextRegionCoprocessorEnvironmentc,
+defaultInternalScannerpreCompact(ObserverContextRegionCoprocessorEnvironmentc,
Storestore,
InternalScannerscanner,
ScanTypescanType,
@@ -1121,7 +1122,7 @@ default
 
 postCompact
-defaultvoidpostCompact(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpostCompact(ObserverContextRegionCoprocessorEnvironmentc,
  Storestore,
  StoreFileresultFile,
  CompactionLifeCycleTrackertracker,
@@ -1146,7 +1147,7 @@ default
 
 preClose
-defaultvoidpreClose(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpreClose(ObserverContextRegionCoprocessorEnvironmentc,
   booleanabortRequested)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Called before the region is reported as closed to the 
master.
@@ -1165,7 +1166,7 @@ default
 
 postClose
-defaultvoidpostClose(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpostClose(ObserverContextRegionCoprocessorEnvironmentc,
booleanabortRequested)
 Called after the region is reported as closed to the 
master.
 
@@ -1181,13 +1182,14 @@ default
 
 preGetOp
-defaultvoidpreGetOp(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpreGetOp(ObserverContextRegionCoprocessorEnvironmentc,
   Getget,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellresult)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Called before the client performs a Get
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
 
 Parameters:
 c - the environment provided by the region server
@@ -1206,7 +1208,7 @@ default
 
 postGetOp
-defaultvoidpostGetOp(ObserverContextRegionCoprocessorEnvironmentc,
+defaultvoidpostGetOp(ObserverContextRegionCoprocessorEnvironmentc,
Getget,
http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellresult)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -1230,13 +1232,14 @@ default
 
 preExists
-defaultbooleanpreExists(ObserverContextRegionCoprocessorEnvironmentc,

[35/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/org/apache/hadoop/hbase/client/Delete.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Delete.html 
b/devapidocs/org/apache/hadoop/hbase/client/Delete.html
index d617678..d288bff 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Delete.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Delete.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":42,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":42,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -129,7 +129,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class Delete
+public class Delete
 extends Mutation
 implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableRow
 Used to perform Delete operations on a single row.
@@ -375,20 +375,13 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 Set the TTL desired for the result of the mutation, in 
milliseconds.
 
 
-
-http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
-toMap(intmaxCols)
-Compile the details beyond the scope of getFingerprint 
(row, columns,
- timestamps, etc.) into a Map along with the fingerprinted information.
-
-
 
 
 
 
 
 Methods inherited from classorg.apache.hadoop.hbase.client.Mutation
-cellScanner,
 checkRow,
 checkRow,
 checkRow,
 compareTo,
 createPutKeyValue,
 createPutKeyValue,
 createPutKeyValue, extraHeapSize,
 getACL,
 getCellList,
 getCellVisibility,
 getClusterIds,
 getDurability,
 getFamilyCellMap,
 getFingerprint,
 getRow, href="../../../../../org/apache/hadoop/hbase/client/Mutation.html#getTimeStamp--">getTimeStamp,
 > href="../../../../../org/apache/hadoop/hbase/client/Mutation.html#getTTL--">getTTL,
 > href="../../../../../org/apache/hadoop/hbase/client/Mutation.html#heapSize--">heapSize,
 > href="../../../../../org/apache/hadoop/hbase/client/Mutation.html#isEmpty--">isEmpty,
 > href="../../../../../org/apache/hadoop/hbase/client/Mutation.html#isReturnResults--">isReturnResults,
 > href="../../../../../org/apache/hadoop/hbase/client/Mutation.html#numFamilies--">numFamilies,
 > href="../../../../../org/apache/hadoop/hbase/client/Mutation.html#setReturnResults-boolean-">setReturnResults,
 > href="../../../../../org/apache/hadoop/hbase/client/Mutation.html#size--">size,
 > href="../../../../../org/apache/hadoop/hbase/client/Mutation.html#toCellVisibility-org.apache.hadoop.hbase.security.visibility.CellVisibility-">toCellVisibility
+cellScanner,
 checkRow,
 checkRow,
 checkRow,
 compareTo,
 createPutKeyValue,
 createPutKeyValue,
 createPutKeyValue, extraHeapSize,
 get,
 getACL,
 getCellList,
 getCellVisibility,
 getClusterIds,
 getDurability,
 getFamilyCellMap,
 getFingerp
 rint, getRow,
 getTimeStamp,
 getTTL,
 has,
 has,
 has,
 has,
 has,
 heapSize,
 isEmpty, isReturnResults,
 numFamilies,
 setReturnResults,
 size,
 toCellVisibility,
 toMap
 
 
 
@@ -438,7 +431,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Delete
-publicDelete(byte[]row)
+publicDelete(byte[]row)
 Create a Delete operation for the specified row.
  
  If no further operations are done, this will delete everything
@@ -458,7 +451,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Delete
-publicDelete(byte[]row,
+publicDelete(byte[]row,
   longtimestamp)
 Create a Delete operation for the specified row and 
timestamp.
 
@@ -481,7 +474,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Delete
-publicDelete(byte[]row,
+publicDelete(byte[]row,
   introwOffset,
   introwLength)
 Create a Delete operation for the specified row and 
timestamp.
@@ -506,7 +499,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Delete
-publicDelete(byte[]row,
+publicDelete(byte[]row,
   introwOffset,
   introwLength,
   longtimestamp)
@@ -533,7 +526,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Delete

[47/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/apidocs/org/apache/hadoop/hbase/client/Put.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Put.html 
b/apidocs/org/apache/hadoop/hbase/client/Put.html
index 7d177ab..37773cd 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Put.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Put.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -129,7 +129,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class Put
+public class Put
 extends Mutation
 implements org.apache.hadoop.hbase.io.HeapSize, http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableRow
 Used to perform Put operations for a single row.
@@ -303,95 +303,58 @@ implements org.apache.hadoop.hbase.io.HeapSize, http://docs.oracle.com/
 
 
 
-http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCell
-get(byte[]family,
-   byte[]qualifier)
-Returns a list of all KeyValue objects with matching column 
family and qualifier.
-
-
-
-boolean
-has(byte[]family,
-   byte[]qualifier)
-A convenience method to determine if this object's 
familyMap contains
- a value assigned to the given family  qualifier.
-
-
-
-boolean
-has(byte[]family,
-   byte[]qualifier,
-   byte[]value)
-A convenience method to determine if this object's 
familyMap contains
- a value assigned to the given family, qualifier and timestamp.
-
-
-
-boolean
-has(byte[]family,
-   byte[]qualifier,
-   longts)
-A convenience method to determine if this object's 
familyMap contains
- a value assigned to the given family, qualifier and timestamp.
-
-
-
-boolean
-has(byte[]family,
-   byte[]qualifier,
-   longts,
-   byte[]value)
-A convenience method to determine if this object's 
familyMap contains
- the given value assigned to the given family, qualifier and timestamp.
-
-
-
 Put
 setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">String,org.apache.hadoop.hbase.security.access.Permissionperms)
 
-
+
 Put
 setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringuser,
   
org.apache.hadoop.hbase.security.access.Permissionperms)
 
-
+
 Put
 setAttribute(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname,
 byte[]value)
 Sets an attribute.
 
 
-
+
 Put
 setCellVisibility(org.apache.hadoop.hbase.security.visibility.CellVisibilityexpression)
 Sets the visibility expression associated with cells in 
this Mutation.
 
 
-
+
 Put
 setClusterIds(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/UUID.html?is-external=true;
 title="class or interface in java.util">UUIDclusterIds)
 Marks that the clusters with the given clusterIds have 
consumed the mutation
 
 
-
+
 Put
 setDurability(Durabilityd)
 Set the durability for this mutation
 
 
-
+
 Put
 setFamilyCellMap(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCellmap)
 Method for setting the put's familyMap
 
 
-
+
 Put
 setId(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringid)
 This method allows you to set an identifier on an 
operation.
 
 
-
+
+Put
+setTimestamp(longtimestamp)
+Set the timestamp of the delete.
+
+
+
 Put
 setTTL(longttl)
 Set the TTL desired for the result of the mutation, in 
milliseconds.
@@ -403,7 +366,7 @@ implements org.apache.hadoop.hbase.io.HeapSize, http://docs.oracle.com/
 
 
 Methods inherited from classorg.apache.hadoop.hbase.client.Mutation
-cellScanner,
 compareTo,
 extraHeapSize,
 getACL,
 getCellVisibility,
 getClusterIds,
 getDurability,
 getFamilyCellMap,
 getFingerprint,
 getRow, getTimeStamp,
 

[45/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/apidocs/src-html/org/apache/hadoop/hbase/client/Delete.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Delete.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Delete.html
index 686fe97..e2e01c7 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Delete.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Delete.html
@@ -32,348 +32,331 @@
 024import java.util.Map;
 025import java.util.NavigableMap;
 026import java.util.UUID;
-027
-028import org.apache.hadoop.hbase.Cell;
-029import 
org.apache.hadoop.hbase.CellUtil;
-030import 
org.apache.hadoop.hbase.HConstants;
-031import 
org.apache.hadoop.hbase.KeyValue;
-032import 
org.apache.yetus.audience.InterfaceAudience;
-033import 
org.apache.hadoop.hbase.security.access.Permission;
-034import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
-035import 
org.apache.hadoop.hbase.util.Bytes;
-036
-037/**
-038 * Used to perform Delete operations on a 
single row.
-039 * p
-040 * To delete an entire row, instantiate a 
Delete object with the row
-041 * to delete.  To further define the 
scope of what to delete, perform
-042 * additional methods as outlined 
below.
-043 * p
-044 * To delete specific families, execute 
{@link #addFamily(byte[]) deleteFamily}
-045 * for each family to delete.
-046 * p
-047 * To delete multiple versions of 
specific columns, execute
-048 * {@link #addColumns(byte[], byte[]) 
deleteColumns}
-049 * for each column to delete.
-050 * p
-051 * To delete specific versions of 
specific columns, execute
-052 * {@link #addColumn(byte[], byte[], 
long) deleteColumn}
-053 * for each column version to delete.
-054 * p
-055 * Specifying timestamps, deleteFamily 
and deleteColumns will delete all
-056 * versions with a timestamp less than or 
equal to that passed.  If no
-057 * timestamp is specified, an entry is 
added with a timestamp of 'now'
-058 * where 'now' is the servers's 
System.currentTimeMillis().
-059 * Specifying a timestamp to the 
deleteColumn method will
-060 * delete versions only with a timestamp 
equal to that specified.
-061 * If no timestamp is passed to 
deleteColumn, internally, it figures the
-062 * most recent cell's timestamp and adds 
a delete at that timestamp; i.e.
-063 * it deletes the most recently added 
cell.
-064 * pThe timestamp passed to the 
constructor is used ONLY for delete of
-065 * rows.  For anything less -- a 
deleteColumn, deleteColumns or
-066 * deleteFamily -- then you need to use 
the method overrides that take a
-067 * timestamp.  The constructor timestamp 
is not referenced.
-068 */
-069@InterfaceAudience.Public
-070public class Delete extends Mutation 
implements ComparableRow {
-071  /**
-072   * Create a Delete operation for the 
specified row.
-073   * p
-074   * If no further operations are done, 
this will delete everything
-075   * associated with the specified row 
(all versions of all columns in all
-076   * families), with timestamp from 
current point in time to the past.
-077   * Cells defining timestamp for a 
future point in time
-078   * (timestamp  current time) will 
not be deleted.
-079   * @param row row key
-080   */
-081  public Delete(byte [] row) {
-082this(row, 
HConstants.LATEST_TIMESTAMP);
-083  }
-084
-085  /**
-086   * Create a Delete operation for the 
specified row and timestamp.p
-087   *
-088   * If no further operations are done, 
this will delete all columns in all
-089   * families of the specified row with a 
timestamp less than or equal to the
-090   * specified timestamp.p
-091   *
-092   * This timestamp is ONLY used for a 
delete row operation.  If specifying
-093   * families or columns, you must 
specify each timestamp individually.
-094   * @param row row key
-095   * @param timestamp maximum version 
timestamp (only for delete row)
-096   */
-097  public Delete(byte [] row, long 
timestamp) {
-098this(row, 0, row.length, 
timestamp);
-099  }
-100
-101  /**
-102   * Create a Delete operation for the 
specified row and timestamp.p
-103   *
-104   * If no further operations are done, 
this will delete all columns in all
-105   * families of the specified row with a 
timestamp less than or equal to the
-106   * specified timestamp.p
-107   *
-108   * This timestamp is ONLY used for a 
delete row operation.  If specifying
-109   * families or columns, you must 
specify each timestamp individually.
-110   * @param row We make a local copy of 
this passed in row.
-111   * @param rowOffset
-112   * @param rowLength
-113   */
-114  public Delete(final byte[] row, final 
int rowOffset, final int rowLength) {
-115this(row, rowOffset, rowLength, 
HConstants.LATEST_TIMESTAMP);
-116  }
-117
-118  /**
-119   * Create a Delete operation for the 
specified row and timestamp.p
-120   *
-121   * If no further operations are done, 
this will delete all columns in all
-122   * 

[51/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
Published site at .


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/d171b896
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/d171b896
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/d171b896

Branch: refs/heads/asf-site
Commit: d171b89655cf7b7527dbaca0bd8414a6fc5ee254
Parents: 89207f3
Author: jenkins 
Authored: Wed Dec 6 15:17:34 2017 +
Committer: jenkins 
Committed: Wed Dec 6 15:17:34 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 28940 ++---
 apidocs/index-all.html  |28 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html | 2 +-
 .../org/apache/hadoop/hbase/client/Append.html  |63 +-
 .../org/apache/hadoop/hbase/client/Delete.html  |87 +-
 .../apache/hadoop/hbase/client/Increment.html   |83 +-
 .../apache/hadoop/hbase/client/Mutation.html|   279 +-
 apidocs/org/apache/hadoop/hbase/client/Put.html |   231 +-
 .../hadoop/hbase/client/class-use/Append.html   | 4 +
 .../hadoop/hbase/client/class-use/Delete.html   | 4 +-
 .../hbase/client/class-use/Increment.html   | 4 +
 .../hadoop/hbase/client/class-use/Mutation.html | 6 +
 .../hadoop/hbase/client/class-use/Put.html  | 4 +
 .../hbase/mapreduce/HFileOutputFormat2.html | 6 +-
 .../org/apache/hadoop/hbase/client/Append.html  |   354 +-
 .../org/apache/hadoop/hbase/client/Delete.html  |   667 +-
 .../apache/hadoop/hbase/client/Increment.html   |   565 +-
 .../apache/hadoop/hbase/client/Mutation.html|   904 +-
 .../org/apache/hadoop/hbase/client/Put.html |   826 +-
 .../hbase/mapreduce/HFileOutputFormat2.html |  1264 +-
 book.html   |75 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 36106 -
 checkstyle.rss  |58 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html |34 +-
 dependency-info.html| 4 +-
 dependency-management.html  |14 +-
 devapidocs/constant-values.html | 6 +-
 devapidocs/index-all.html   |34 +-
 .../hadoop/hbase/backup/package-tree.html   | 2 +-
 .../org/apache/hadoop/hbase/class-use/Cell.html | 2 +-
 .../org/apache/hadoop/hbase/client/Append.html  |63 +-
 .../org/apache/hadoop/hbase/client/Delete.html  |87 +-
 .../apache/hadoop/hbase/client/Increment.html   |87 +-
 .../apache/hadoop/hbase/client/Mutation.html|   307 +-
 .../org/apache/hadoop/hbase/client/Put.html |   260 +-
 .../hadoop/hbase/client/class-use/Append.html   | 4 +
 .../hadoop/hbase/client/class-use/Delete.html   | 4 +-
 .../hbase/client/class-use/Increment.html   | 4 +
 .../hadoop/hbase/client/class-use/Mutation.html | 6 +
 .../hadoop/hbase/client/class-use/Put.html  | 4 +
 .../hadoop/hbase/client/package-tree.html   |24 +-
 .../hbase/constraint/ConstraintProcessor.html   | 3 +-
 .../hbase/coprocessor/CoprocessorHost.html  | 2 +-
 .../hbase/coprocessor/ObserverContext.html  |10 +-
 .../hbase/coprocessor/ObserverContextImpl.html  | 6 +-
 .../hbase/coprocessor/RegionObserver.html   |   154 +-
 ...serverWithMetrics.ExampleRegionObserver.html | 3 +-
 .../example/WriteHeavyIncrementObserver.html| 6 +-
 .../hadoop/hbase/filter/package-tree.html   |10 +-
 .../org/apache/hadoop/hbase/fs/HFileSystem.html | 2 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 4 +-
 .../hadoop/hbase/ipc/NettyRpcFrameDecoder.html  | 6 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 2 +-
 .../HFileOutputFormat2.WriterLength.html| 8 +-
 .../hbase/mapreduce/HFileOutputFormat2.html |36 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 2 +-
 .../hbase/master/balancer/package-tree.html | 2 +-
 .../hadoop/hbase/master/package-tree.html   | 4 +-
 .../hbase/master/procedure/package-tree.html| 2 +-
 .../hadoop/hbase/monitoring/package-tree.html   | 2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |16 +-
 .../hadoop/hbase/procedure2/package-tree.html   | 6 +-
 .../hadoop/hbase/quotas/package-tree.html   | 8 +-
 .../hadoop/hbase/regionserver/package-tree.html |14 +-
 .../hbase/regionserver/wal/package-tree.html| 2 +-
 .../replication/regionserver/package-tree.html  | 2 +-
 .../hbase/security/access/AccessController.html |42 +-
 

[48/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/apidocs/org/apache/hadoop/hbase/client/Increment.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/Increment.html 
b/apidocs/org/apache/hadoop/hbase/client/Increment.html
index a051699..acf692c 100644
--- a/apidocs/org/apache/hadoop/hbase/client/Increment.html
+++ b/apidocs/org/apache/hadoop/hbase/client/Increment.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -129,7 +129,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class Increment
+public class Increment
 extends Mutation
 implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableRow
 Used to perform Increment operations on a single row.
@@ -337,11 +337,17 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Increment
+setTimestamp(longtimestamp)
+Set the timestamp of the delete.
+
+
+
+Increment
 setTTL(longttl)
 Set the TTL desired for the result of the mutation, in 
milliseconds.
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 toString()
 Produces a string representation of this Operation.
@@ -353,7 +359,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Methods inherited from classorg.apache.hadoop.hbase.client.Mutation
-cellScanner,
 getACL,
 getCellVisibility,
 getClusterIds,
 getDurability,
 getFamilyCellMap,
 getFingerprint,
 getRow,
 getTimeStamp,
 getTTL, heapSize,
 isEmpty,
 size,
 toMap
+cellScanner,
 get,
 getACL,
 getCellVisibility,
 getClusterIds,
 getDurability,
 getFamilyCellMap,
 getFingerprint,
 getRow,
 getTimeStamp, getTTL,
 has,
 has,
 has,
 has,
 has,
 heapSize,
 isEmpty,
 size,
 toMap
 
 
 
@@ -396,7 +402,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Increment
-publicIncrement(byte[]row)
+publicIncrement(byte[]row)
 Create a Increment operation for the specified row.
  
  At least one column must be incremented.
@@ -412,7 +418,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Increment
-publicIncrement(byte[]row,
+publicIncrement(byte[]row,
  intoffset,
  intlength)
 Create a Increment operation for the specified row.
@@ -430,7 +436,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 Increment
-publicIncrement(Incrementi)
+publicIncrement(Incrementi)
 Copy constructor
 
 Parameters:
@@ -452,7 +458,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 add
-publicIncrementadd(Cellcell)
+publicIncrementadd(Cellcell)
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Add the specified KeyValue to this operation.
 
@@ -471,7 +477,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 addColumn
-publicIncrementaddColumn(byte[]family,
+publicIncrementaddColumn(byte[]family,
byte[]qualifier,
longamount)
 Increment the column from the specific family with the 
specified qualifier
@@ -494,7 +500,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 getTimeRange
-publicTimeRangegetTimeRange()
+publicTimeRangegetTimeRange()
 Gets the TimeRange used for this increment.
 
 Returns:
@@ -508,7 +514,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 setTimeRange
-publicIncrementsetTimeRange(longminStamp,
+publicIncrementsetTimeRange(longminStamp,
   longmaxStamp)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Sets the TimeRange to be used on the Get for this increment.
@@ -530,13 +536,28 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Comparabl
 
 
 
+
+
+
+
+
+setTimestamp
+publicIncrementsetTimestamp(longtimestamp)
+Description copied from 
class:Mutation
+Set the timestamp of the delete.
+
+Overrides:
+setTimestampin
 classMutation
+
+
+
 
 
 
 
 
 

[31/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.ExampleRegionObserver.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.ExampleRegionObserver.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.ExampleRegionObserver.html
index dbe5063..2e0cc38 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.ExampleRegionObserver.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/example/ExampleRegionObserverWithMetrics.ExampleRegionObserver.html
@@ -329,7 +329,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client performs a Get
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
 
 Specified by:
 preGetOpin
 interfaceRegionObserver

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.html
 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.html
index 5872418..9726790 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/coprocessor/example/WriteHeavyIncrementObserver.html
@@ -626,7 +626,8 @@ implements Description copied from 
interface:RegionObserver
 Called before the client performs a Get
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
 
 Specified by:
 preGetOpin
 interfaceRegionObserver
@@ -662,7 +663,8 @@ implements Description copied from 
interface:RegionObserver
 Called before Increment.
  
- Call CoprocessorEnvironment#bypass to skip default actions
+ Call CoprocessorEnvironment#bypass to skip default actions.
+ If 'bypass' is set, we skip out on calling any subsequent chained 
coprocessors.
  
  Note: Do not retain references to any Cells in 'increment' beyond the life of 
this invocation.
  If need a Cell reference for later use, copy the cell and use that.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 2c57415..e2ea452 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -182,14 +182,14 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.filter.FuzzyRowFilter.SatisfiesCode
-org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
-org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
-org.apache.hadoop.hbase.filter.FilterList.Operator
-org.apache.hadoop.hbase.filter.Filter.ReturnCode
 org.apache.hadoop.hbase.filter.FilterWrapper.FilterRowRetCode
+org.apache.hadoop.hbase.filter.Filter.ReturnCode
+org.apache.hadoop.hbase.filter.FilterList.Operator
 org.apache.hadoop.hbase.filter.FuzzyRowFilter.Order
 org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
+org.apache.hadoop.hbase.filter.FuzzyRowFilter.SatisfiesCode
+org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
+org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html 
b/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html
index 4e4e827..760b068 100644
--- a/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html
+++ b/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html
@@ -474,7 +474,7 @@ extends org.apache.hadoop.fs.FilterFileSystem
 HFileSystem
 publicHFileSystem(org.apache.hadoop.fs.FileSystemfs)
 Wrap a FileSystem 

[43/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
index 00db109..9c710d6 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
@@ -36,32 +36,32 @@
 028import java.util.NavigableMap;
 029import java.util.TreeMap;
 030import java.util.UUID;
-031
-032import org.apache.hadoop.hbase.Cell;
-033import 
org.apache.hadoop.hbase.CellScannable;
-034import 
org.apache.hadoop.hbase.CellScanner;
-035import 
org.apache.hadoop.hbase.CellUtil;
-036import 
org.apache.hadoop.hbase.HConstants;
-037import 
org.apache.hadoop.hbase.KeyValue;
-038import 
org.apache.hadoop.hbase.PrivateCellUtil;
-039import org.apache.hadoop.hbase.Tag;
-040import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-041import 
org.apache.hadoop.hbase.io.HeapSize;
-042import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-043import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-044import 
org.apache.hadoop.hbase.security.access.AccessControlConstants;
-045import 
org.apache.hadoop.hbase.security.access.AccessControlUtil;
-046import 
org.apache.hadoop.hbase.security.access.Permission;
-047import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
-048import 
org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
-049import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-050import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-051import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
-052import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
-053import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
-054import 
org.apache.hadoop.hbase.util.Bytes;
-055import 
org.apache.hadoop.hbase.util.ClassSize;
-056import 
org.apache.yetus.audience.InterfaceAudience;
+031import org.apache.hadoop.hbase.Cell;
+032import 
org.apache.hadoop.hbase.CellScannable;
+033import 
org.apache.hadoop.hbase.CellScanner;
+034import 
org.apache.hadoop.hbase.CellUtil;
+035import 
org.apache.hadoop.hbase.HConstants;
+036import 
org.apache.hadoop.hbase.KeyValue;
+037import 
org.apache.hadoop.hbase.PrivateCellUtil;
+038import org.apache.hadoop.hbase.Tag;
+039import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+040import 
org.apache.hadoop.hbase.io.HeapSize;
+041import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+042import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+043import 
org.apache.hadoop.hbase.security.access.AccessControlConstants;
+044import 
org.apache.hadoop.hbase.security.access.AccessControlUtil;
+045import 
org.apache.hadoop.hbase.security.access.Permission;
+046import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
+047import 
org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
+048import 
org.apache.hadoop.hbase.util.Bytes;
+049import 
org.apache.hadoop.hbase.util.ClassSize;
+050import 
org.apache.yetus.audience.InterfaceAudience;
+051
+052import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
+053import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
+054import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
+055import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
+056import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
 057
 058@InterfaceAudience.Public
 059public abstract class Mutation extends 
OperationWithAttributes implements Row, CellScannable,
@@ -223,360 +223,508 @@
 215if (getTTL() != Long.MAX_VALUE) {
 216  map.put("ttl", getTTL());
 217}
-218return map;
-219  }
-220
-221  private static MapString, 
Object cellToStringMap(Cell c) {
-222MapString, Object stringMap = 
new HashMap();
-223stringMap.put("qualifier", 
Bytes.toStringBinary(c.getQualifierArray(), c.getQualifierOffset(),
-224
c.getQualifierLength()));
-225stringMap.put("timestamp", 
c.getTimestamp());
-226stringMap.put("vlen", 
c.getValueLength());
-227ListTag tags = 
PrivateCellUtil.getTags(c);
-228if (tags != null) {
-229  ListString tagsString = new 
ArrayList(tags.size());
-230  for (Tag t : tags) {
-231tagsString
-232.add((t.getType()) + ":" + 
Bytes.toStringBinary(Tag.cloneValue(t)));
-233  }
-234  stringMap.put("tag", tagsString);
-235}
-236return stringMap;
-237  }
-238
-239  /**
-240   * Set the durability for this 
mutation
-241   * @param d
-242   */
-243  public Mutation 
setDurability(Durability d) {
-244this.durability = d;
-245return this;
-246  }
-247
-248  /** 

[37/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 3985a62..25e8044 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Reactor Dependency Convergence
 
@@ -658,22 +658,22 @@
 3.4.10
 
 
-org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.zookeeper:zookeeper:jar:3.4.10:compile|+-org.apache.hadoop:hadoop-common:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.had
 oop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAP
 SHOT:test|+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-minicluster:jar:2.7.4:test|+-org.apache.hadoop:hadoop-common:test-jar:tests:2.7.4:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.7.4:test|\-org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.7.4:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for dupli
 cate)+-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)\-org.apache.hbase:hbase-rsgroup:jar:3.0.0-SNAPSHOT:compile\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile 
- version managed from 3.4.6
 ; omitted for duplicate)
-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for dup
 licate)|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for duplicat
 e)+-org.apache.hbase:hbase-mapreduce:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for 

[50/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 472021a..de4dbaa 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,16 +5,16 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20171205144750+00'00')
-/CreationDate (D:20171205144750+00'00')
+/ModDate (D:20171206144745+00'00')
+/CreationDate (D:20171206144745+00'00')
 >>
 endobj
 2 0 obj
 << /Type /Catalog
 /Pages 3 0 R
 /Names 26 0 R
-/Outlines 4519 0 R
-/PageLabels 4743 0 R
+/Outlines 4517 0 R
+/PageLabels 4741 0 R
 /PageMode /UseOutlines
 /OpenAction [7 0 R /FitH 842.89]
 /ViewerPreferences << /DisplayDocTitle true
@@ -24,7 +24,7 @@ endobj
 3 0 obj
 << /Type /Pages
 /Count 702
-/Kids [7 0 R 12 0 R 14 0 R 16 0 R 18 0 R 20 0 R 22 0 R 24 0 R 44 0 R 47 0 R 50 
0 R 54 0 R 63 0 R 66 0 R 69 0 R 71 0 R 76 0 R 80 0 R 83 0 R 89 0 R 91 0 R 94 0 
R 96 0 R 103 0 R 109 0 R 114 0 R 116 0 R 130 0 R 133 0 R 142 0 R 151 0 R 161 0 
R 170 0 R 181 0 R 185 0 R 187 0 R 191 0 R 202 0 R 209 0 R 218 0 R 226 0 R 231 0 
R 240 0 R 248 0 R 257 0 R 271 0 R 278 0 R 287 0 R 296 0 R 304 0 R 311 0 R 319 0 
R 325 0 R 332 0 R 339 0 R 347 0 R 356 0 R 366 0 R 378 0 R 387 0 R 394 0 R 401 0 
R 409 0 R 417 0 R 426 0 R 435 0 R 443 0 R 451 0 R 463 0 R 471 0 R 478 0 R 486 0 
R 494 0 R 502 0 R 511 0 R 516 0 R 519 0 R 524 0 R 528 0 R 544 0 R 554 0 R 559 0 
R 573 0 R 579 0 R 584 0 R 586 0 R 588 0 R 591 0 R 593 0 R 597 0 R 605 0 R 611 0 
R 616 0 R 621 0 R 632 0 R 643 0 R 648 0 R 656 0 R 660 0 R 664 0 R 666 0 R 681 0 
R 695 0 R 705 0 R 707 0 R 709 0 R 718 0 R 730 0 R 740 0 R 748 0 R 754 0 R 757 0 
R 761 0 R 765 0 R 768 0 R 771 0 R 773 0 R 776 0 R 780 0 R 782 0 R 787 0 R 791 0 
R 796 0 R 800 0 R 804 0 R 810 0 R 812 0 
 R 816 0 R 825 0 R 827 0 R 830 0 R 834 0 R 837 0 R 840 0 R 854 0 R 861 0 R 869 
0 R 880 0 R 886 0 R 898 0 R 902 0 R 905 0 R 909 0 R 912 0 R 917 0 R 926 0 R 934 
0 R 938 0 R 942 0 R 947 0 R 951 0 R 953 0 R 968 0 R 979 0 R 984 0 R 991 0 R 994 
0 R 1003 0 R 1012 0 R 1016 0 R 1021 0 R 1026 0 R 1028 0 R 1030 0 R 1032 0 R 
1042 0 R 1050 0 R 1054 0 R 1061 0 R 1068 0 R 1076 0 R 1081 0 R 1086 0 R 1091 0 
R 1099 0 R 1103 0 R 1108 0 R 1110 0 R 1117 0 R 1123 0 R 1125 0 R 1132 0 R 1142 
0 R 1146 0 R 1148 0 R 1150 0 R 1154 0 R 1157 0 R 1162 0 R 1165 0 R 1177 0 R 
1181 0 R 1187 0 R 1195 0 R 1200 0 R 1204 0 R 1208 0 R 1210 0 R 1213 0 R 1216 0 
R 1219 0 R 1224 0 R 1228 0 R 1232 0 R 1237 0 R 1241 0 R 1245 0 R 1247 0 R 1257 
0 R 1260 0 R 1268 0 R 1277 0 R 1283 0 R 1287 0 R 1289 0 R 1299 0 R 1302 0 R 
1308 0 R 1317 0 R 1320 0 R 1327 0 R 1335 0 R 1337 0 R 1339 0 R 1348 0 R 1350 0 
R 1352 0 R 1355 0 R 1357 0 R 1359 0 R 1361 0 R 1363 0 R 1366 0 R 1370 0 R 1375 
0 R 1377 0 R 1379 0 R 1381 0 R 1386 0 R 1393 0 R 1399 0 R
  1402 0 R 1404 0 R 1407 0 R 1411 0 R 1413 0 R 1416 0 R 1418 0 R 1420 0 R 1423 
0 R 1428 0 R 1434 0 R 1442 0 R 1456 0 R 1470 0 R 1473 0 R 1478 0 R 1491 0 R 
1500 0 R 1514 0 R 1520 0 R 1529 0 R 1544 0 R 1558 0 R 1570 0 R 1575 0 R 1581 0 
R 1592 0 R 1598 0 R 1603 0 R 1611 0 R 1614 0 R 1623 0 R 1630 0 R 1634 0 R 1647 
0 R 1649 0 R 1655 0 R 1659 0 R 1661 0 R 1669 0 R 1677 0 R 1681 0 R 1683 0 R 
1685 0 R 1697 0 R 1703 0 R 1712 0 R 1718 0 R 1732 0 R 1737 0 R 1746 0 R 1754 0 
R 1760 0 R 1767 0 R 1771 0 R 1774 0 R 1776 0 R 1784 0 R 1788 0 R 1794 0 R 1798 
0 R 1806 0 R 1811 0 R 1817 0 R 1822 0 R 1824 0 R 1833 0 R 1840 0 R 1846 0 R 
1851 0 R 1855 0 R 1858 0 R 1863 0 R 1869 0 R 1876 0 R 1878 0 R 1880 0 R 1883 0 
R 1891 0 R 1894 0 R 1901 0 R 1910 0 R 1913 0 R 1918 0 R 1920 0 R 1923 0 R 1926 
0 R 1929 0 R 1936 0 R 1941 0 R 1943 0 R 1951 0 R 1958 0 R 1965 0 R 1971 0 R 
1976 0 R 1978 0 R 1987 0 R 1997 0 R 2007 0 R 2013 0 R 2021 0 R 2023 0 R 2026 0 
R 2028 0 R 2031 0 R 2034 0 R 2037 0 R 2042 0 R 2046 0 R 2057 0
  R 2060 0 R 2065 0 R 2068 0 R 2070 0 R 2075 0 R 2085 0 R 2087 0 R 2089 0 R 
2091 0 R 2093 0 R 2096 0 R 2098 0 R 2100 0 R 2103 0 R 2105 0 R 2107 0 R 2111 0 
R 2116 0 R 2125 0 R 2127 0 R 2129 0 R 2136 0 R 2138 0 R 2143 0 R 2145 0 R 2147 
0 R 2154 0 R 2159 0 R 2163 0 R 2167 0 R 2171 0 R 2173 0 R 2175 0 R 2179 0 R 
2182 0 R 2184 0 R 2186 0 R 2190 0 R 2192 0 R 2195 0 R 2197 0 R 2199 0 R 2201 0 
R 2208 0 R 2211 0 R 2216 0 R 2218 0 R 2220 0 R  0 R 2224 0 R 2232 0 R 2243 
0 R 2257 0 R 2268 0 R 2272 0 R 2278 0 R 2282 0 R 2285 0 R 2290 0 R 2296 0 R 
2298 0 R 2301 0 R 2303 0 R 2305 0 R 2307 0 R 2311 0 R 2313 0 R 2326 0 R 2329 0 
R 2337 0 R 2343 0 R 2355 0 R 2369 0 R 2383 0 R 2400 0 R 2404 0 R 2406 0 R 2410 
0 R 2428 0 R 2434 0 R 2446 0 R 2450 0 R 2454 0 R 2463 0 R 2473 0 R 2478 0 R 
2489 0 R 2502 0 R 2520 0 R 2529 0 R 2532 0 R 2541 0 R 2559 0 R 2566 0 R 2569 0 
R 2574 0 R 2578 0 R 2581 0 R 2590 0 R 2599 0 R 2602 0 R 2604 0 R 2608 0 R 2623 
0 R 2631 0 R 2636 0 R 2641 0 R 2644 0 R 2646 

[27/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
index 00db109..9c710d6 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/Mutation.html
@@ -36,32 +36,32 @@
 028import java.util.NavigableMap;
 029import java.util.TreeMap;
 030import java.util.UUID;
-031
-032import org.apache.hadoop.hbase.Cell;
-033import 
org.apache.hadoop.hbase.CellScannable;
-034import 
org.apache.hadoop.hbase.CellScanner;
-035import 
org.apache.hadoop.hbase.CellUtil;
-036import 
org.apache.hadoop.hbase.HConstants;
-037import 
org.apache.hadoop.hbase.KeyValue;
-038import 
org.apache.hadoop.hbase.PrivateCellUtil;
-039import org.apache.hadoop.hbase.Tag;
-040import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
-041import 
org.apache.hadoop.hbase.io.HeapSize;
-042import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-043import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-044import 
org.apache.hadoop.hbase.security.access.AccessControlConstants;
-045import 
org.apache.hadoop.hbase.security.access.AccessControlUtil;
-046import 
org.apache.hadoop.hbase.security.access.Permission;
-047import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
-048import 
org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
-049import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-050import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
-051import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
-052import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
-053import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
-054import 
org.apache.hadoop.hbase.util.Bytes;
-055import 
org.apache.hadoop.hbase.util.ClassSize;
-056import 
org.apache.yetus.audience.InterfaceAudience;
+031import org.apache.hadoop.hbase.Cell;
+032import 
org.apache.hadoop.hbase.CellScannable;
+033import 
org.apache.hadoop.hbase.CellScanner;
+034import 
org.apache.hadoop.hbase.CellUtil;
+035import 
org.apache.hadoop.hbase.HConstants;
+036import 
org.apache.hadoop.hbase.KeyValue;
+037import 
org.apache.hadoop.hbase.PrivateCellUtil;
+038import org.apache.hadoop.hbase.Tag;
+039import 
org.apache.hadoop.hbase.exceptions.DeserializationException;
+040import 
org.apache.hadoop.hbase.io.HeapSize;
+041import 
org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+042import 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+043import 
org.apache.hadoop.hbase.security.access.AccessControlConstants;
+044import 
org.apache.hadoop.hbase.security.access.AccessControlUtil;
+045import 
org.apache.hadoop.hbase.security.access.Permission;
+046import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
+047import 
org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
+048import 
org.apache.hadoop.hbase.util.Bytes;
+049import 
org.apache.hadoop.hbase.util.ClassSize;
+050import 
org.apache.yetus.audience.InterfaceAudience;
+051
+052import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
+053import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
+054import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataInput;
+055import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteArrayDataOutput;
+056import 
org.apache.hadoop.hbase.shaded.com.google.common.io.ByteStreams;
 057
 058@InterfaceAudience.Public
 059public abstract class Mutation extends 
OperationWithAttributes implements Row, CellScannable,
@@ -223,360 +223,508 @@
 215if (getTTL() != Long.MAX_VALUE) {
 216  map.put("ttl", getTTL());
 217}
-218return map;
-219  }
-220
-221  private static MapString, 
Object cellToStringMap(Cell c) {
-222MapString, Object stringMap = 
new HashMap();
-223stringMap.put("qualifier", 
Bytes.toStringBinary(c.getQualifierArray(), c.getQualifierOffset(),
-224
c.getQualifierLength()));
-225stringMap.put("timestamp", 
c.getTimestamp());
-226stringMap.put("vlen", 
c.getValueLength());
-227ListTag tags = 
PrivateCellUtil.getTags(c);
-228if (tags != null) {
-229  ListString tagsString = new 
ArrayList(tags.size());
-230  for (Tag t : tags) {
-231tagsString
-232.add((t.getType()) + ":" + 
Bytes.toStringBinary(Tag.cloneValue(t)));
-233  }
-234  stringMap.put("tag", tagsString);
-235}
-236return stringMap;
-237  }
-238
-239  /**
-240   * Set the durability for this 
mutation
-241   * @param d
-242   */
-243  public Mutation 
setDurability(Durability d) {
-244this.durability = d;
-245return this;
-246  }

[46/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/apidocs/src-html/org/apache/hadoop/hbase/client/Append.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Append.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Append.html
index 2866e16..4678f40 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Append.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Append.html
@@ -25,181 +25,185 @@
 017 */
 018package org.apache.hadoop.hbase.client;
 019
-020import java.util.ArrayList;
-021import java.util.List;
-022import java.util.Map;
-023import java.util.NavigableMap;
-024import java.util.UUID;
-025
-026import org.apache.hadoop.hbase.Cell;
-027import 
org.apache.hadoop.hbase.CellUtil;
-028import 
org.apache.hadoop.hbase.KeyValue;
-029import 
org.apache.yetus.audience.InterfaceAudience;
-030import 
org.apache.hadoop.hbase.security.access.Permission;
-031import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
-032import 
org.apache.hadoop.hbase.util.Bytes;
-033
-034/**
-035 * Performs Append operations on a single 
row.
-036 * p
-037 * This operation ensures atomicty to 
readers. Appends are done
-038 * under a single row lock, so write 
operations to a row are synchronized, and
-039 * readers are guaranteed to see this 
operation fully completed.
-040 * p
-041 * To append to a set of columns of a 
row, instantiate an Append object with the
-042 * row to append to. At least one column 
to append must be specified using the
-043 * {@link #addColumn(byte[], byte[], 
byte[])} method.
-044 */
-045@InterfaceAudience.Public
-046public class Append extends Mutation {
-047  /**
-048   * @param returnResults
-049   *  True (default) if the 
append operation should return the results.
-050   *  A client that is not 
interested in the result can save network
-051   *  bandwidth setting this to 
false.
-052   */
-053  public Append setReturnResults(boolean 
returnResults) {
-054
super.setReturnResults(returnResults);
-055return this;
-056  }
-057
-058  /**
-059   * @return current setting for 
returnResults
-060   */
-061  // This method makes public the 
superclasses's protected method.
-062  public boolean isReturnResults() {
-063return super.isReturnResults();
-064  }
-065
-066  /**
-067   * Create a Append operation for the 
specified row.
-068   * p
-069   * At least one column must be appended 
to.
-070   * @param row row key; makes a local 
copy of passed in array.
-071   */
-072  public Append(byte[] row) {
-073this(row, 0, row.length);
-074  }
-075  /**
-076   * Copy constructor
-077   * @param a
-078   */
-079  public Append(Append a) {
-080this.row = a.getRow();
-081this.ts = a.getTimeStamp();
-082
this.familyMap.putAll(a.getFamilyCellMap());
-083for (Map.EntryString, byte[] 
entry : a.getAttributesMap().entrySet()) {
-084  this.setAttribute(entry.getKey(), 
entry.getValue());
-085}
-086this.setPriority(a.getPriority());
-087  }
-088
-089  /** Create a Append operation for the 
specified row.
-090   * p
-091   * At least one column must be appended 
to.
-092   * @param rowArray Makes a copy out of 
this buffer.
-093   * @param rowOffset
-094   * @param rowLength
-095   */
-096  public Append(final byte [] rowArray, 
final int rowOffset, final int rowLength) {
-097checkRow(rowArray, rowOffset, 
rowLength);
-098this.row = Bytes.copy(rowArray, 
rowOffset, rowLength);
-099  }
-100
-101  /**
-102   * Add the specified column and value 
to this Append operation.
-103   * @param family family name
-104   * @param qualifier column qualifier
-105   * @param value value to append to 
specified column
-106   * @return this
-107   * @deprecated As of release 2.0.0, 
this will be removed in HBase 3.0.0.
-108   * Use {@link 
#addColumn(byte[], byte[], byte[])} instead
-109   */
-110  @Deprecated
-111  public Append add(byte [] family, byte 
[] qualifier, byte [] value) {
-112return this.addColumn(family, 
qualifier, value);
-113  }
-114
-115  /**
-116   * Add the specified column and value 
to this Append operation.
-117   * @param family family name
-118   * @param qualifier column qualifier
-119   * @param value value to append to 
specified column
-120   * @return this
-121   */
-122  public Append addColumn(byte[] family, 
byte[] qualifier, byte[] value) {
-123KeyValue kv = new KeyValue(this.row, 
family, qualifier, this.ts, KeyValue.Type.Put, value);
-124return add(kv);
-125  }
-126
-127  /**
-128   * Add column and value to this Append 
operation.
-129   * @param cell
-130   * @return This instance
-131   */
-132  @SuppressWarnings("unchecked")
-133  public Append add(final Cell cell) {
-134// Presume it is KeyValue for now.
-135byte [] family = 
CellUtil.cloneFamily(cell);
-136
-137// Get cell list for the family
-138ListCell list = 
getCellList(family);
-139
-140// find 

[29/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/client/Delete.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/Delete.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/Delete.html
index 686fe97..e2e01c7 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/Delete.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/Delete.html
@@ -32,348 +32,331 @@
 024import java.util.Map;
 025import java.util.NavigableMap;
 026import java.util.UUID;
-027
-028import org.apache.hadoop.hbase.Cell;
-029import 
org.apache.hadoop.hbase.CellUtil;
-030import 
org.apache.hadoop.hbase.HConstants;
-031import 
org.apache.hadoop.hbase.KeyValue;
-032import 
org.apache.yetus.audience.InterfaceAudience;
-033import 
org.apache.hadoop.hbase.security.access.Permission;
-034import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
-035import 
org.apache.hadoop.hbase.util.Bytes;
-036
-037/**
-038 * Used to perform Delete operations on a 
single row.
-039 * p
-040 * To delete an entire row, instantiate a 
Delete object with the row
-041 * to delete.  To further define the 
scope of what to delete, perform
-042 * additional methods as outlined 
below.
-043 * p
-044 * To delete specific families, execute 
{@link #addFamily(byte[]) deleteFamily}
-045 * for each family to delete.
-046 * p
-047 * To delete multiple versions of 
specific columns, execute
-048 * {@link #addColumns(byte[], byte[]) 
deleteColumns}
-049 * for each column to delete.
-050 * p
-051 * To delete specific versions of 
specific columns, execute
-052 * {@link #addColumn(byte[], byte[], 
long) deleteColumn}
-053 * for each column version to delete.
-054 * p
-055 * Specifying timestamps, deleteFamily 
and deleteColumns will delete all
-056 * versions with a timestamp less than or 
equal to that passed.  If no
-057 * timestamp is specified, an entry is 
added with a timestamp of 'now'
-058 * where 'now' is the servers's 
System.currentTimeMillis().
-059 * Specifying a timestamp to the 
deleteColumn method will
-060 * delete versions only with a timestamp 
equal to that specified.
-061 * If no timestamp is passed to 
deleteColumn, internally, it figures the
-062 * most recent cell's timestamp and adds 
a delete at that timestamp; i.e.
-063 * it deletes the most recently added 
cell.
-064 * pThe timestamp passed to the 
constructor is used ONLY for delete of
-065 * rows.  For anything less -- a 
deleteColumn, deleteColumns or
-066 * deleteFamily -- then you need to use 
the method overrides that take a
-067 * timestamp.  The constructor timestamp 
is not referenced.
-068 */
-069@InterfaceAudience.Public
-070public class Delete extends Mutation 
implements ComparableRow {
-071  /**
-072   * Create a Delete operation for the 
specified row.
-073   * p
-074   * If no further operations are done, 
this will delete everything
-075   * associated with the specified row 
(all versions of all columns in all
-076   * families), with timestamp from 
current point in time to the past.
-077   * Cells defining timestamp for a 
future point in time
-078   * (timestamp  current time) will 
not be deleted.
-079   * @param row row key
-080   */
-081  public Delete(byte [] row) {
-082this(row, 
HConstants.LATEST_TIMESTAMP);
-083  }
-084
-085  /**
-086   * Create a Delete operation for the 
specified row and timestamp.p
-087   *
-088   * If no further operations are done, 
this will delete all columns in all
-089   * families of the specified row with a 
timestamp less than or equal to the
-090   * specified timestamp.p
-091   *
-092   * This timestamp is ONLY used for a 
delete row operation.  If specifying
-093   * families or columns, you must 
specify each timestamp individually.
-094   * @param row row key
-095   * @param timestamp maximum version 
timestamp (only for delete row)
-096   */
-097  public Delete(byte [] row, long 
timestamp) {
-098this(row, 0, row.length, 
timestamp);
-099  }
-100
-101  /**
-102   * Create a Delete operation for the 
specified row and timestamp.p
-103   *
-104   * If no further operations are done, 
this will delete all columns in all
-105   * families of the specified row with a 
timestamp less than or equal to the
-106   * specified timestamp.p
-107   *
-108   * This timestamp is ONLY used for a 
delete row operation.  If specifying
-109   * families or columns, you must 
specify each timestamp individually.
-110   * @param row We make a local copy of 
this passed in row.
-111   * @param rowOffset
-112   * @param rowLength
-113   */
-114  public Delete(final byte[] row, final 
int rowOffset, final int rowLength) {
-115this(row, rowOffset, rowLength, 
HConstants.LATEST_TIMESTAMP);
-116  }
-117
-118  /**
-119   * Create a Delete operation for the 
specified row and timestamp.p
-120   *
-121   * If no further operations are done, 
this will delete all columns in all

[34/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/org/apache/hadoop/hbase/client/Mutation.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/Mutation.html 
b/devapidocs/org/apache/hadoop/hbase/client/Mutation.html
index 9d61318..960e9dd 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/Mutation.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/Mutation.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":9,"i2":9,"i3":9,"i4":9,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":9,"i34":9,"i35":9,"i36":10};
+var methods = 
{"i0":10,"i1":9,"i2":9,"i3":9,"i4":9,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":9,"i41":9,"i42":9,"i43":10};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -282,146 +282,204 @@ implements 
 
 
+http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCell
+get(byte[]family,
+   byte[]qualifier)
+Returns a list of all KeyValue objects with matching column 
family and qualifier.
+
+
+
 byte[]
 getACL()
 
-
+
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCell
 getCellList(byte[]family)
 Creates an empty list if one doesn't exist for the given 
column family
  or else it returns the associated list of Cell objects.
 
 
-
+
 CellVisibility
 getCellVisibility()
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/util/UUID.html?is-external=true;
 title="class or interface in java.util">UUID
 getClusterIds()
 
-
+
 Durability
 getDurability()
 Get the current durability
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapbyte[],http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListCell
 getFamilyCellMap()
 Method for retrieving the put's familyMap
 
 
-
+
 http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 getFingerprint()
 Compile the column family (i.e.
 
 
-
+
 byte[]
 getRow()
 Method for retrieving the delete's row
 
 
-
+
 long
 getTimeStamp()
 Method for retrieving the timestamp
 
 
-
+
 long
 getTTL()
 Return the TTL requested for the result of the mutation, in 
milliseconds.
 
 
-
+
+boolean
+has(byte[]family,
+   byte[]qualifier)
+A convenience method to determine if this object's 
familyMap contains
+ a value assigned to the given family  qualifier.
+
+
+
+boolean
+has(byte[]family,
+   byte[]qualifier,
+   byte[]value)
+A convenience method to determine if this object's 
familyMap contains
+ a value assigned to the given family, qualifier and timestamp.
+
+
+
+boolean
+has(byte[]family,
+   byte[]qualifier,
+   longts)
+A convenience method to determine if this object's 
familyMap contains
+ a value assigned to the given family, qualifier and timestamp.
+
+
+
+boolean
+has(byte[]family,
+   byte[]qualifier,
+   longts,
+   byte[]value)
+A convenience method to determine if this object's 
familyMap contains
+ the given value assigned to the given family, qualifier and timestamp.
+
+
+
+protected boolean
+has(byte[]family,
+   byte[]qualifier,
+   longts,
+   byte[]value,
+   booleanignoreTS,
+   booleanignoreValue)
+
+
 long
 heapSize()
 
-
+
 boolean
 isEmpty()
 Method to check if the familyMap is empty
 
 
-
+
 protected boolean
 isReturnResults()
 
-
+
 int
 numFamilies()
 
-
+
 Mutation
 setACL(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,Permissionperms)
 
-
+
 Mutation
 setACL(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 

[11/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
index 14e1368..a473d38 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetype builder  Reactor Dependency 
Convergence
 
@@ -488,22 +488,22 @@
 3.4.10
 
 
-org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.zookeeper:zookeeper:jar:3.4.10:compile|+-org.apache.hadoop:hadoop-common:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.had
 oop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAP
 SHOT:test|+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-minicluster:jar:2.7.4:test|+-org.apache.hadoop:hadoop-common:test-jar:tests:2.7.4:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.7.4:test|\-org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.7.4:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for dupli
 cate)+-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)\-org.apache.hbase:hbase-rsgroup:jar:3.0.0-SNAPSHOT:compile\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile 
- version managed from 3.4.6
 ; omitted for duplicate)
-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for dup
 licate)|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for 

[25/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.EnvironmentPriorityComparator.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.EnvironmentPriorityComparator.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.EnvironmentPriorityComparator.html
index 7f3de9e..557f64e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.EnvironmentPriorityComparator.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.EnvironmentPriorityComparator.html
@@ -686,51 +686,56 @@
 678  // Internal to shouldBypass, it 
checks if obeserverOperation#isBypassable().
 679  bypass |= 
observerOperation.shouldBypass();
 680  observerOperation.postEnvCall();
-681}
-682return bypass;
-683  }
-684
-685  /**
-686   * Coprocessor classes can be 
configured in any order, based on that priority is set and
-687   * chained in a sorted order. Should be 
used preStop*() hooks i.e. when master/regionserver is
-688   * going down. This function first 
calls coprocessor methods (using ObserverOperation.call())
-689   * and then shutdowns the environment 
in postEnvCall(). br
-690   * Need to execute all coprocessor 
methods first then postEnvCall(), otherwise some coprocessors
-691   * may remain shutdown if any exception 
occurs during next coprocessor execution which prevent
-692   * master/regionserver stop or cluster 
shutdown. (Refer:
-693   * a 
href="https://issues.apache.org/jira/browse/HBASE-16663"HBASE-16663/a;
-694   * @return true if bypaas coprocessor 
execution, false if not.
-695   * @throws IOException
-696   */
-697  protected O boolean 
execShutdown(final ObserverOperationO observerOperation)
-698  throws IOException {
-699if (observerOperation == null) return 
false;
-700boolean bypass = false;
-701ListE envs = 
coprocEnvironments.get();
-702// Iterate the coprocessors and 
execute ObserverOperation's call()
-703for (E env : envs) {
-704  observerOperation.prepare(env);
-705  Thread currentThread = 
Thread.currentThread();
-706  ClassLoader cl = 
currentThread.getContextClassLoader();
-707  try {
-708
currentThread.setContextClassLoader(env.getClassLoader());
-709
observerOperation.callObserver();
-710  } catch (Throwable e) {
-711handleCoprocessorThrowable(env, 
e);
-712  } finally {
-713
currentThread.setContextClassLoader(cl);
-714  }
-715  bypass |= 
observerOperation.shouldBypass();
-716}
-717
-718// Iterate the coprocessors and 
execute ObserverOperation's postEnvCall()
-719for (E env : envs) {
-720  observerOperation.prepare(env);
-721  observerOperation.postEnvCall();
-722}
-723return bypass;
-724  }
-725}
+681  if (bypass) {
+682// If CP says bypass, skip out 
w/o calling any following CPs; they might ruin our response.
+683// In hbase1, this used to be 
called 'complete'. In hbase2, we unite bypass and 'complete'.
+684break;
+685  }
+686}
+687return bypass;
+688  }
+689
+690  /**
+691   * Coprocessor classes can be 
configured in any order, based on that priority is set and
+692   * chained in a sorted order. Should be 
used preStop*() hooks i.e. when master/regionserver is
+693   * going down. This function first 
calls coprocessor methods (using ObserverOperation.call())
+694   * and then shutdowns the environment 
in postEnvCall(). br
+695   * Need to execute all coprocessor 
methods first then postEnvCall(), otherwise some coprocessors
+696   * may remain shutdown if any exception 
occurs during next coprocessor execution which prevent
+697   * master/regionserver stop or cluster 
shutdown. (Refer:
+698   * a 
href="https://issues.apache.org/jira/browse/HBASE-16663"HBASE-16663/a;
+699   * @return true if bypaas coprocessor 
execution, false if not.
+700   * @throws IOException
+701   */
+702  protected O boolean 
execShutdown(final ObserverOperationO observerOperation)
+703  throws IOException {
+704if (observerOperation == null) return 
false;
+705boolean bypass = false;
+706ListE envs = 
coprocEnvironments.get();
+707// Iterate the coprocessors and 
execute ObserverOperation's call()
+708for (E env : envs) {
+709  observerOperation.prepare(env);
+710  Thread currentThread = 
Thread.currentThread();
+711  ClassLoader cl = 
currentThread.getContextClassLoader();
+712  try {
+713
currentThread.setContextClassLoader(env.getClassLoader());
+714
observerOperation.callObserver();
+715  } catch (Throwable e) {
+716handleCoprocessorThrowable(env, 
e);
+717  } finally {
+718
currentThread.setContextClassLoader(cl);
+719  }
+720  bypass |= 
observerOperation.shouldBypass();
+721 

[22/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderBlocks.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderBlocks.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderBlocks.html
index 1daa9e8..5636600 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderBlocks.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderBlocks.html
@@ -125,7 +125,7 @@
 117
 118  /**
 119   * Wrap a FileSystem object within a 
HFileSystem. The noChecksumFs and
-120   * writefs are both set to be the same 
specified fs. 
+120   * writefs are both set to be the same 
specified fs.
 121   * Do not verify hbase-checksums while 
reading data from filesystem.
 122   * @param fs Set the noChecksumFs and 
writeFs to this specified filesystem.
 123   */

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
index 1daa9e8..5636600 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
@@ -125,7 +125,7 @@
 117
 118  /**
 119   * Wrap a FileSystem object within a 
HFileSystem. The noChecksumFs and
-120   * writefs are both set to be the same 
specified fs. 
+120   * writefs are both set to be the same 
specified fs.
 121   * Do not verify hbase-checksums while 
reading data from filesystem.
 122   * @param fs Set the noChecksumFs and 
writeFs to this specified filesystem.
 123   */

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.html
index 1daa9e8..5636600 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.html
@@ -125,7 +125,7 @@
 117
 118  /**
 119   * Wrap a FileSystem object within a 
HFileSystem. The noChecksumFs and
-120   * writefs are both set to be the same 
specified fs. 
+120   * writefs are both set to be the same 
specified fs.
 121   * Do not verify hbase-checksums while 
reading data from filesystem.
 122   * @param fs Set the noChecksumFs and 
writeFs to this specified filesystem.
 123   */

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.html
index 48e79b7..70ea204 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcFrameDecoder.html
@@ -103,146 +103,142 @@
 095}
 096
 097int frameLengthInt = (int) 
frameLength;
-098if (in.readableBytes()  
frameLengthInt) {
+098if (in.readableBytes()  
frameLengthInt + FRAME_LENGTH_FIELD_LENGTH) {
 099  return;
 100}
 101
 102
in.skipBytes(FRAME_LENGTH_FIELD_LENGTH);
 103
 104// extract frame
-105int readerIndex = in.readerIndex();
-106ByteBuf frame = 
in.retainedSlice(readerIndex, frameLengthInt);
-107in.readerIndex(readerIndex + 
frameLengthInt);
-108
-109out.add(frame);
-110  }
-111
-112  private void 
handleTooBigRequest(ByteBuf in) throws IOException {
-113in.markReaderIndex();
-114int preIndex = in.readerIndex();
-115int headerSize = 
readRawVarint32(in);
-116if (preIndex == in.readerIndex()) {
-117  return;
-118}
-119if (headerSize  0) {
-120  throw new IOException("negative 
headerSize: " + headerSize);
-121}
-122
-123if (in.readableBytes()  
headerSize) {
-124  in.resetReaderIndex();
-125  return;
-126}
-127
-128RPCProtos.RequestHeader header = 
getHeader(in, headerSize);
-129
-130// Notify the client about the 
offending request
-131NettyServerCall reqTooBig =
-132  new 
NettyServerCall(header.getCallId(), connection.service, null, null, null, 
null,
-133connection, 0, connection.addr, 
System.currentTimeMillis(), 0,
-134connection.rpcServer.reservoir, 
connection.rpcServer.cellBlockBuilder, null);
-135
-136

[04/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
index e1d6267..62a21f8 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Dependency Information
 
@@ -147,7 +147,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
index 3b44899..5b24842 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Project Dependency Management
 
@@ -775,18 +775,24 @@
 test-jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
+org.apache.hbase
+http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
+3.0.0-SNAPSHOT
+test-jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
 org.bouncycastle
 http://www.bouncycastle.org/java.html;>bcprov-jdk16
 1.46
 jar
 http://www.bouncycastle.org/licence.html;>Bouncy Castle 
Licence
-
+
 org.hamcrest
 https://github.com/hamcrest/JavaHamcrest/hamcrest-core;>hamcrest-core
 1.3
 jar
 http://www.opensource.org/licenses/bsd-license.php;>New BSD 
License
-
+
 org.mockito
 http://mockito.org;>mockito-core
 2.1.0
@@ -804,7 +810,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
index ba33fd7..c49993d 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
About
 
@@ -119,7 +119,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
index f79eb3f..fe8398e 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/integration.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
CI Management
 
@@ -126,7 +126,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
  

[21/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
index 62bc799..5c004ce 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
@@ -250,7 +250,7 @@
 242Cell kv = cell;
 243// null input == user explicitly 
wants to flush
 244if (row == null  kv == 
null) {
-245  rollWriters();
+245  rollWriters(null);
 246  return;
 247}
 248
@@ -284,636 +284,642 @@
 276  configureStoragePolicy(conf, 
fs, tableAndFamily, writerPath);
 277}
 278
-279// If any of the HFiles for the 
column families has reached
-280// maxsize, we need to roll all 
the writers
-281if (wl != null  
wl.written + length = maxsize) {
-282  this.rollRequested = true;
-283}
-284
-285// This can only happen once a 
row is finished though
-286if (rollRequested  
Bytes.compareTo(this.previousRow, rowKey) != 0) {
-287  rollWriters();
-288}
-289
-290// create a new WAL writer, if 
necessary
-291if (wl == null || wl.writer == 
null) {
-292  if 
(conf.getBoolean(LOCALITY_SENSITIVE_CONF_KEY, DEFAULT_LOCALITY_SENSITIVE)) {
-293HRegionLocation loc = null;
-294
-295String tableName = 
Bytes.toString(tableNameBytes);
-296if (tableName != null) {
-297  try (Connection connection 
= ConnectionFactory.createConnection(conf);
-298 RegionLocator 
locator =
-299   
connection.getRegionLocator(TableName.valueOf(tableName))) {
-300loc = 
locator.getRegionLocation(rowKey);
-301  } catch (Throwable e) {
-302LOG.warn("There's 
something wrong when locating rowkey: " +
-303  Bytes.toString(rowKey) 
+ " for tablename: " + tableName, e);
-304loc = null;
-305  } }
-306
-307if (null == loc) {
-308  if (LOG.isTraceEnabled()) 
{
-309LOG.trace("failed to get 
region location, so use default writer for rowkey: " +
-310  
Bytes.toString(rowKey));
-311  }
-312  wl = 
getNewWriter(tableNameBytes, family, conf, null);
-313} else {
-314  if (LOG.isDebugEnabled()) 
{
-315LOG.debug("first rowkey: 
[" + Bytes.toString(rowKey) + "]");
-316  }
-317  InetSocketAddress 
initialIsa =
-318  new 
InetSocketAddress(loc.getHostname(), loc.getPort());
-319  if 
(initialIsa.isUnresolved()) {
-320if (LOG.isTraceEnabled()) 
{
-321  LOG.trace("failed to 
resolve bind address: " + loc.getHostname() + ":"
-322  + loc.getPort() + 
", so use default writer");
-323}
-324wl = 
getNewWriter(tableNameBytes, family, conf, null);
-325  } else {
-326if (LOG.isDebugEnabled()) 
{
-327  LOG.debug("use favored 
nodes writer: " + initialIsa.getHostString());
-328}
-329wl = 
getNewWriter(tableNameBytes, family, conf, new InetSocketAddress[] { 
initialIsa
-330});
-331  }
-332}
-333  } else {
-334wl = 
getNewWriter(tableNameBytes, family, conf, null);
-335  }
-336}
-337
-338// we now have the proper WAL 
writer. full steam ahead
-339// TODO : Currently in 
SettableTimeStamp but this will also move to ExtendedCell
-340
PrivateCellUtil.updateLatestStamp(cell, this.now);
-341wl.writer.append(kv);
-342wl.written += length;
-343
-344// Copy the row so we know when a 
row transition.
-345this.previousRow = rowKey;
-346  }
-347
-348  private void rollWriters() throws 
IOException {
-349for (WriterLength wl : 
this.writers.values()) {
-350  if (wl.writer != null) {
-351LOG.info(
-352"Writer=" + 
wl.writer.getPath() + ((wl.written == 0)? "": ", wrote=" + wl.written));
-353close(wl.writer);
-354  }
-355  wl.writer = null;
-356  wl.written = 0;
-357}
-358this.rollRequested = false;
-359  }
-360
-361  /*
-362   * Create a new StoreFile.Writer.
-363   * @param family
-364   * @return A WriterLength, 
containing a new StoreFile.Writer.
-365   * @throws IOException
-366   */
-367  

[12/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/dependency-info.html
--
diff --git a/hbase-build-configuration/hbase-archetypes/dependency-info.html 
b/hbase-build-configuration/hbase-archetypes/dependency-info.html
index 5d94285..94a5728 100644
--- a/hbase-build-configuration/hbase-archetypes/dependency-info.html
+++ b/hbase-build-configuration/hbase-archetypes/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetypes  Dependency Information
 
@@ -148,7 +148,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/dependency-management.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/dependency-management.html 
b/hbase-build-configuration/hbase-archetypes/dependency-management.html
index 324589b..f7f3c3e 100644
--- a/hbase-build-configuration/hbase-archetypes/dependency-management.html
+++ b/hbase-build-configuration/hbase-archetypes/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetypes  Project Dependency 
Management
 
@@ -775,18 +775,24 @@
 test-jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
+org.apache.hbase
+http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
+3.0.0-SNAPSHOT
+test-jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
 org.bouncycastle
 http://www.bouncycastle.org/java.html;>bcprov-jdk16
 1.46
 jar
 http://www.bouncycastle.org/licence.html;>Bouncy Castle 
Licence
-
+
 org.hamcrest
 https://github.com/hamcrest/JavaHamcrest/hamcrest-core;>hamcrest-core
 1.3
 jar
 http://www.opensource.org/licenses/bsd-license.php;>New BSD 
License
-
+
 org.mockito
 http://mockito.org;>mockito-core
 2.1.0
@@ -804,7 +810,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
index 3f41509..e3aa300 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-archetype-builder/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetype builder  Project 
Dependencies
 
@@ -330,7 +330,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 



[08/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-convergence.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-convergence.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-convergence.html
index e273bdf..70187ae 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-convergence.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-client archetype  Reactor 
Dependency Convergence
 
@@ -488,22 +488,22 @@
 3.4.10
 
 
-org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.zookeeper:zookeeper:jar:3.4.10:compile|+-org.apache.hadoop:hadoop-common:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.had
 oop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAP
 SHOT:test|+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-minicluster:jar:2.7.4:test|+-org.apache.hadoop:hadoop-common:test-jar:tests:2.7.4:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.7.4:test|\-org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.7.4:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for dupli
 cate)+-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)\-org.apache.hbase:hbase-rsgroup:jar:3.0.0-SNAPSHOT:compile\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile 
- version managed from 3.4.6
 ; omitted for duplicate)
-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for dup
 licate)|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for 

[15/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/dependency-convergence.html
--
diff --git a/hbase-build-configuration/dependency-convergence.html 
b/hbase-build-configuration/dependency-convergence.html
index 9acdb2b..a5f5cf4 100644
--- a/hbase-build-configuration/dependency-convergence.html
+++ b/hbase-build-configuration/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Build Configuration  Reactor Dependency 
Convergence
 
@@ -488,22 +488,22 @@
 3.4.10
 
 
-org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.zookeeper:zookeeper:jar:3.4.10:compile|+-org.apache.hadoop:hadoop-common:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.had
 oop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAP
 SHOT:test|+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-minicluster:jar:2.7.4:test|+-org.apache.hadoop:hadoop-common:test-jar:tests:2.7.4:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.7.4:test|\-org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.7.4:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for dupli
 cate)+-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)\-org.apache.hbase:hbase-rsgroup:jar:3.0.0-SNAPSHOT:compile\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile 
- version managed from 3.4.6
 ; omitted for duplicate)
-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for dup
 licate)|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for duplicat
 

[13/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/dependency-convergence.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/dependency-convergence.html 
b/hbase-build-configuration/hbase-archetypes/dependency-convergence.html
index a626f4f..e56371c 100644
--- a/hbase-build-configuration/hbase-archetypes/dependency-convergence.html
+++ b/hbase-build-configuration/hbase-archetypes/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Archetypes  Reactor Dependency 
Convergence
 
@@ -488,22 +488,22 @@
 3.4.10
 
 
-org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.zookeeper:zookeeper:jar:3.4.10:compile|+-org.apache.hadoop:hadoop-common:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.had
 oop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAP
 SHOT:test|+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-minicluster:jar:2.7.4:test|+-org.apache.hadoop:hadoop-common:test-jar:tests:2.7.4:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.7.4:test|\-org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.7.4:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for dupli
 cate)+-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)\-org.apache.hbase:hbase-rsgroup:jar:3.0.0-SNAPSHOT:compile\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile 
- version managed from 3.4.6
 ; omitted for duplicate)
-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for dup
 licate)|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for duplicat
 

hbase-site git commit: INFRA-10751 Empty commit

2017-12-06 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site d171b8965 -> 116e12f36


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/116e12f3
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/116e12f3
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/116e12f3

Branch: refs/heads/asf-site
Commit: 116e12f367bc3af3b16ad1881d7c2854736c7dd2
Parents: d171b89
Author: jenkins 
Authored: Wed Dec 6 15:18:14 2017 +
Committer: jenkins 
Committed: Wed Dec 6 15:18:14 2017 +

--

--




[40/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/book.html
--
diff --git a/book.html b/book.html
index 8e71822..26813e2 100644
--- a/book.html
+++ b/book.html
@@ -5766,285 +5766,214 @@ Here are those configurations:
 
 Table 3. Configurations support dynamically 
change
 
-
-
+
 
 
 
 Key
-Default Value
 
 
 
 
 hbase.ipc.server.fallback-to-simple-auth-allowed
-false
 
 
 hbase.cleaner.scan.dir.concurrent.size
-0.5
 
 
 hbase.regionserver.thread.compaction.large
-1
 
 
 hbase.regionserver.thread.compaction.small
-1
 
 
 hbase.regionserver.thread.split
-1
 
 
 hbase.regionserver.throughput.controller
-PressureAwareCompactionThroughputController.class
 
 
 hbase.regionserver.thread.hfilecleaner.throttle
-64*1024*1024 (64M)
 
 
 hbase.regionserver.hfilecleaner.large.queue.size
-10240
 
 
 hbase.regionserver.hfilecleaner.small.queue.size
-10240
 
 
 hbase.regionserver.hfilecleaner.large.thread.count
-1
 
 
 hbase.regionserver.hfilecleaner.small.thread.count
-1
 
 
 hbase.regionserver.flush.throughput.controller
-NoLimitThroughputController.class
 
 
 hbase.hstore.compaction.max.size
-Long.MAX
 
 
 hbase.hstore.compaction.max.size.offpeak
-Long.MAX
 
 
 hbase.hstore.compaction.min.size
-1024*1024*128 (128M)
 
 
 hbase.hstore.compaction.min
-3
 
 
 hbase.hstore.compaction.max
-10
 
 
 hbase.hstore.compaction.ratio
-1.2f
 
 
 hbase.hstore.compaction.ratio.offpeak
-5.0f
 
 
 hbase.regionserver.thread.compaction.throttle
-2*10*1024*1024*128 (2560M)
 
 
 hbase.hregion.majorcompaction
-1000*60*60*24*7 (1 week)
 
 
 hbase.hregion.majorcompaction.jitter
-0.5f
 
 
 hbase.hstore.min.locality.to.skip.major.compact
-0.0f
 
 
 hbase.hstore.compaction.date.tiered.max.storefile.age.millis
-Long.MAX
 
 
 hbase.hstore.compaction.date.tiered.incoming.window.min
-6
 
 
 hbase.hstore.compaction.date.tiered.window.policy.class
-ExploringCompactionPolicy.class
 
 
 hbase.hstore.compaction.date.tiered.single.output.for.minor.compaction
-true
 
 
 hbase.hstore.compaction.date.tiered.window.factory.class
-ExponentialCompactionWindowFactory.class
 
 
 hbase.offpeak.start.hour
--1
 
 
 hbase.offpeak.end.hour
--1
 
 
 hbase.oldwals.cleaner.thread.size
-2
 
 
 hbase.procedure.worker.keep.alive.time.msec
-Long.MAX
 
 
 hbase.procedure.worker.add.stuck.percentage
-0.5f
 
 
 hbase.procedure.worker.monitor.interval.msec
-5000 (5 
seconds)
 
 
 hbase.procedure.worker.stuck.threshold.msec
-1 (10 
seconds)
 
 
 hbase.regions.slop
-0.2
 
 
 hbase.regions.overallSlop
-0.2
 
 
 hbase.balancer.tablesOnMaster
-false
 
 
 hbase.balancer.tablesOnMaster.systemTablesOnly
-false
 
 
 hbase.util.ip.to.rack.determiner
-ScriptBasedMapping.class
 
 
 hbase.ipc.server.max.callqueue.length
-10*30
 
 
 hbase.ipc.server.priority.max.callqueue.length
-10*30
 
 
 hbase.ipc.server.callqueue.type
-fifo
 
 
 hbase.ipc.server.callqueue.codel.target.delay
-100
 
 
 hbase.ipc.server.callqueue.codel.interval
-100
 
 
 hbase.ipc.server.callqueue.codel.lifo.threshold
-0.8
 
 
 hbase.master.balancer.stochastic.maxSteps
-100
 
 
 hbase.master.balancer.stochastic.stepsPerRegion
-800
 
 
 hbase.master.balancer.stochastic.maxRunningTime
-30*1000 
(30 seconds)
 
 
 hbase.master.balancer.stochastic.runMaxSteps
-false
 
 
 hbase.master.balancer.stochastic.numRegionLoadsToRemember
-15
 
 
 hbase.master.loadbalance.bytable
-false
 
 
 hbase.master.balancer.stochastic.minCostNeedBalance
-0.05f
 
 
 hbase.master.balancer.stochastic.localityCost
-25
 
 
 hbase.master.balancer.stochastic.rackLocalityCost
-15
 
 
 hbase.master.balancer.stochastic.readRequestCost
-5
 
 
 hbase.master.balancer.stochastic.writeRequestCost
-5
 
 
 hbase.master.balancer.stochastic.memstoreSizeCost
-5
 
 
 hbase.master.balancer.stochastic.storefileSizeCost
-5
 
 
 hbase.master.balancer.stochastic.regionReplicaHostCostKey
-10
 
 
 hbase.master.balancer.stochastic.regionReplicaRackCostKey
-1
 
 
 hbase.master.balancer.stochastic.regionCountCost
-500
 
 
 hbase.master.balancer.stochastic.primaryRegionCountCost
-500
 
 
 hbase.master.balancer.stochastic.moveCost
-7
 
 
 hbase.master.balancer.stochastic.maxMovePercent
-0.25f
 
 
 hbase.master.balancer.stochastic.tableSkewCost
-35
 
 
 
-
-For more details consult the patch attached to  https://issues.apache.org/jira/browse/HBASE-12147;>HBASE-12147 Porting 
Online Config Change from 89-fb.
-
 
 
 Upgrading
@@ -37080,7 +37009,7 @@ The server will return cellblocks compressed using this 
same compressor as long
 
 
 Version 3.0.0-SNAPSHOT
-Last updated 2017-12-05 14:29:40 UTC
+Last updated 2017-12-06 14:29:39 UTC
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/bulk-loads.html
--
diff --git a/bulk-loads.html b/bulk-loads.html
index b8d5f81..53f5cb4 100644
--- a/bulk-loads.html
+++ b/bulk-loads.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase   
   Bulk 

[39/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index e4218d1..4f806e4 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -286,10 +286,10 @@
 Warnings
 Errors
 
-3448
+3449
 0
 0
-20747
+20698
 
 Files
 
@@ -477,7 +477,7 @@
 org/apache/hadoop/hbase/HBaseCommonTestingUtility.java
 0
 0
-14
+13
 
 org/apache/hadoop/hbase/HBaseConfiguration.java
 0
@@ -492,7 +492,7 @@
 org/apache/hadoop/hbase/HBaseTestingUtility.java
 0
 0
-286
+266
 
 org/apache/hadoop/hbase/HColumnDescriptor.java
 0
@@ -1527,7 +1527,7 @@
 org/apache/hadoop/hbase/client/Append.java
 0
 0
-6
+4
 
 org/apache/hadoop/hbase/client/AsyncAdmin.java
 0
@@ -1772,7 +1772,7 @@
 org/apache/hadoop/hbase/client/Delete.java
 0
 0
-8
+6
 
 org/apache/hadoop/hbase/client/Get.java
 0
@@ -1822,7 +1822,7 @@
 org/apache/hadoop/hbase/client/Increment.java
 0
 0
-2
+1
 
 org/apache/hadoop/hbase/client/MasterCallable.java
 0
@@ -1862,7 +1862,7 @@
 org/apache/hadoop/hbase/client/Mutation.java
 0
 0
-21
+23
 
 org/apache/hadoop/hbase/client/NoOpRetryableCallerInterceptor.java
 0
@@ -1897,7 +1897,7 @@
 org/apache/hadoop/hbase/client/Put.java
 0
 0
-19
+9
 
 org/apache/hadoop/hbase/client/Query.java
 0
@@ -4682,7 +4682,7 @@
 org/apache/hadoop/hbase/ipc/AbstractTestIPC.java
 0
 0
-2
+3
 
 org/apache/hadoop/hbase/ipc/AdaptiveLifoCoDelCallQueue.java
 0
@@ -12669,46 +12669,16 @@
 0
 3
 
-org/apache/hadoop/hbase/zookeeper/TestHQuorumPeer.java
-0
-0
-1
-
 org/apache/hadoop/hbase/zookeeper/TestMetricsZooKeeperSourceImpl.java
 0
 0
 1
-
-org/apache/hadoop/hbase/zookeeper/TestRecoverableZooKeeper.java
-0
-0
-1
 
 org/apache/hadoop/hbase/zookeeper/TestZKConfig.java
 0
 0
 1
 
-org/apache/hadoop/hbase/zookeeper/TestZKLeaderManager.java
-0
-0
-2
-
-org/apache/hadoop/hbase/zookeeper/TestZKMainServer.java
-0
-0
-2
-
-org/apache/hadoop/hbase/zookeeper/TestZKNodeTracker.java
-0
-0
-6
-
-org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
-0
-0
-4
-
 org/apache/hadoop/hbase/zookeeper/ZKAclReset.java
 0
 0
@@ -12854,12 +12824,12 @@
 
 
 http://checkstyle.sourceforge.net/config_blocks.html#LeftCurly;>LeftCurly
-343
+342
 Error
 
 
 http://checkstyle.sourceforge.net/config_blocks.html#NeedBraces;>NeedBraces
-2235
+2232
 Error
 
 coding
@@ -12912,7 +12882,7 @@
 
 imports
 http://checkstyle.sourceforge.net/config_imports.html#AvoidStarImport;>AvoidStarImport
-134
+129
 Error
 
 
@@ -12922,7 +12892,7 @@
 sortStaticImportsAlphabetically: true
 groups: *,org.apache.hadoop.hbase.shaded
 option: top
-2521
+2509
 Error
 
 
@@ -12945,19 +12915,19 @@
 caseIndent: 2
 basicOffset: 2
 lineWrappingIndentation: 2
-6248
+6239
 Error
 
 javadoc
 http://checkstyle.sourceforge.net/config_javadoc.html#JavadocTagContinuationIndentation;>JavadocTagContinuationIndentation
 
 offset: 2
-861
+840
 Error
 
 
 http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription;>NonEmptyAtclauseDescription
-4315
+4320
 Error
 
 misc
@@ -12975,7 +12945,7 @@
 
 max: 100
 ignorePattern: ^package.*|^import.*|a 
href|href|http://|https://|ftp://|org.apache.thrift.|com.google.protobuf.|hbase.protobuf.generated
-1927
+1926
 Error
 
 
@@ -12995,7 +12965,7 @@
 
 
 http://checkstyle.sourceforge.net/config_whitespace.html#ParenPad;>ParenPad
-305
+303
 Error
 
 Details
@@ -14579,134 +14549,128 @@
 Line
 
 Error
-imports
-ImportOrder
-Wrong order for 'org.apache.hadoop.hbase.io.compress.Compression' 
import.
-33
-
-Error
 indentation
 Indentation
 'array initialization' child have incorrect indentation level 6, expected 
level should be 4.
-59
-
+60
+
 Error
 javadoc
 JavadocTagContinuationIndentation
 Line continuation have incorrect indentation level, expected level should 
be 2.
-99
-
+100
+
 Error
 javadoc
 JavadocTagContinuationIndentation
 Line continuation have incorrect indentation level, expected level should 
be 2.
-100
-
+101
+
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-110
-
+111
+
 Error
 javadoc
 JavadocTagContinuationIndentation
 Line continuation have incorrect indentation level, expected level should 
be 2.
-113
-
+114
+
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-134
-
+135
+
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-152
-
+153
+
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-167
-
+168
+
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-180
-
+181
+
 Error
 javadoc
 JavadocTagContinuationIndentation
 Line continuation have incorrect indentation level, expected level should 
be 2.
-192
-
+193
+
 Error
 javadoc
 JavadocTagContinuationIndentation
 Line continuation have incorrect indentation level, expected level should 
be 2.

[44/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/apidocs/src-html/org/apache/hadoop/hbase/client/Increment.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/Increment.html 
b/apidocs/src-html/org/apache/hadoop/hbase/client/Increment.html
index 312947a..b283abc 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/Increment.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/Increment.html
@@ -32,317 +32,322 @@
 024import java.util.NavigableMap;
 025import java.util.TreeMap;
 026import java.util.UUID;
-027
-028import org.apache.hadoop.hbase.Cell;
-029import 
org.apache.hadoop.hbase.CellUtil;
-030import 
org.apache.hadoop.hbase.KeyValue;
-031import 
org.apache.yetus.audience.InterfaceAudience;
-032import 
org.apache.hadoop.hbase.io.TimeRange;
-033import 
org.apache.hadoop.hbase.security.access.Permission;
-034import 
org.apache.hadoop.hbase.security.visibility.CellVisibility;
-035import 
org.apache.hadoop.hbase.util.Bytes;
-036import 
org.apache.hadoop.hbase.util.ClassSize;
-037
-038/**
-039 * Used to perform Increment operations 
on a single row.
-040 * p
-041 * This operation ensures atomicity to 
readers. Increments are done
-042 * under a single row lock, so write 
operations to a row are synchronized, and
-043 * readers are guaranteed to see this 
operation fully completed.
-044 * p
-045 * To increment columns of a row, 
instantiate an Increment object with the row
-046 * to increment.  At least one column to 
increment must be specified using the
-047 * {@link #addColumn(byte[], byte[], 
long)} method.
-048 */
-049@InterfaceAudience.Public
-050public class Increment extends Mutation 
implements ComparableRow {
-051  private static final long HEAP_OVERHEAD 
=  ClassSize.REFERENCE + ClassSize.TIMERANGE;
-052  private TimeRange tr = new 
TimeRange();
-053
-054  /**
-055   * Create a Increment operation for the 
specified row.
-056   * p
-057   * At least one column must be 
incremented.
-058   * @param row row key (we will make a 
copy of this).
-059   */
-060  public Increment(byte [] row) {
-061this(row, 0, row.length);
-062  }
-063
-064  /**
-065   * Create a Increment operation for the 
specified row.
-066   * p
-067   * At least one column must be 
incremented.
-068   * @param row row key (we will make a 
copy of this).
-069   */
-070  public Increment(final byte [] row, 
final int offset, final int length) {
-071checkRow(row, offset, length);
-072this.row = Bytes.copy(row, offset, 
length);
-073  }
-074  /**
-075   * Copy constructor
-076   * @param i
-077   */
-078  public Increment(Increment i) {
-079this.row = i.getRow();
-080this.ts = i.getTimeStamp();
-081this.tr = i.getTimeRange();
-082
this.familyMap.putAll(i.getFamilyCellMap());
-083for (Map.EntryString, byte[] 
entry : i.getAttributesMap().entrySet()) {
-084  this.setAttribute(entry.getKey(), 
entry.getValue());
-085}
-086super.setPriority(i.getPriority());
-087  }
-088
-089  /**
-090   * Add the specified KeyValue to this 
operation.
-091   * @param cell individual Cell
-092   * @return this
-093   * @throws java.io.IOException e
-094   */
-095  public Increment add(Cell cell) throws 
IOException{
-096byte [] family = 
CellUtil.cloneFamily(cell);
-097ListCell list = 
getCellList(family);
-098//Checking that the row of the kv is 
the same as the put
-099if (!CellUtil.matchingRows(cell, 
this.row)) {
-100  throw new WrongRowIOException("The 
row in " + cell +
-101" doesn't match the original one 
" +  Bytes.toStringBinary(this.row));
-102}
-103list.add(cell);
-104return this;
-105  }
-106
-107  /**
-108   * Increment the column from the 
specific family with the specified qualifier
-109   * by the specified amount.
-110   * p
-111   * Overrides previous calls to 
addColumn for this family and qualifier.
-112   * @param family family name
-113   * @param qualifier column qualifier
-114   * @param amount amount to increment 
by
-115   * @return the Increment object
-116   */
-117  public Increment addColumn(byte [] 
family, byte [] qualifier, long amount) {
-118if (family == null) {
-119  throw new 
IllegalArgumentException("family cannot be null");
-120}
-121ListCell list = 
getCellList(family);
-122KeyValue kv = 
createPutKeyValue(family, qualifier, ts, Bytes.toBytes(amount));
-123list.add(kv);
-124return this;
-125  }
-126
-127  /**
-128   * Gets the TimeRange used for this 
increment.
-129   * @return TimeRange
-130   */
-131  public TimeRange getTimeRange() {
-132return this.tr;
-133  }
-134
-135  /**
-136   * Sets the TimeRange to be used on the 
Get for this increment.
-137   * p
-138   * This is useful for when you have 
counters that only last for specific
-139   * periods of time (ie. counters that 
are partitioned by time).  By setting
-140   * the range of valid times for this 
increment, you can 

[07/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-info.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-info.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-info.html
index c0771ec..fa6927a 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-info.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-client archetype  
Dependency Information
 
@@ -147,7 +147,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-management.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-management.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-management.html
index fdd16aa..3c1da71 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-management.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-client-project/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-client archetype  Project 
Dependency Management
 
@@ -775,18 +775,24 @@
 test-jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
+org.apache.hbase
+http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
+3.0.0-SNAPSHOT
+test-jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
 org.bouncycastle
 http://www.bouncycastle.org/java.html;>bcprov-jdk16
 1.46
 jar
 http://www.bouncycastle.org/licence.html;>Bouncy Castle 
Licence
-
+
 org.hamcrest
 https://github.com/hamcrest/JavaHamcrest/hamcrest-core;>hamcrest-core
 1.3
 jar
 http://www.opensource.org/licenses/bsd-license.php;>New BSD 
License
-
+
 org.mockito
 http://mockito.org;>mockito-core
 2.1.0
@@ -804,7 +810,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-client-project/index.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-client-project/index.html 
b/hbase-build-configuration/hbase-archetypes/hbase-client-project/index.html
index 2e2bdd1..943db8b 100644
--- a/hbase-build-configuration/hbase-archetypes/hbase-client-project/index.html
+++ b/hbase-build-configuration/hbase-archetypes/hbase-client-project/index.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-client archetype  
About
 
@@ -119,7 +119,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-client-project/integration.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-client-project/integration.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-client-project/integration.html
index 232443d..c65702d 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-client-project/integration.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-client-project/integration.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-client archetype  CI 
Management
 
@@ -126,7 +126,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-12-05
+  Last Published: 
2017-12-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-client-project/issue-tracking.html

[03/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-spark/dependencies.html
--
diff --git a/hbase-build-configuration/hbase-spark/dependencies.html 
b/hbase-build-configuration/hbase-spark/dependencies.html
index be3032c..1997852 100644
--- a/hbase-build-configuration/hbase-spark/dependencies.html
+++ b/hbase-build-configuration/hbase-spark/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Spark  Project Dependencies
 
@@ -196,12 +196,18 @@
 jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
+org.apache.hbase
+http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
+3.0.0-SNAPSHOT
+jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
 org.apache.hbase.thirdparty
 http://hbase.apache.org/hbase-shaded-miscellaneous;>hbase-shaded-miscellaneous
 1.0.1
 jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
-
+
 org.apache.yetus
 https://yetus.apache.org/audience-annotations;>audience-annotations
 0.5.0
@@ -282,20 +288,27 @@
 test-jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
+org.apache.hbase
+http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
+3.0.0-SNAPSHOT
+tests
+test-jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
 org.apache.spark
 http://spark.apache.org/;>spark-streaming_2.10
 1.6.0
 tests
 test-jar
 http://www.apache.org/licenses/LICENSE-2.0.html;>Apache 2.0 
License
-
+
 org.scalamock
 http://scalamock.org/;>scalamock-scalatest-support_2.10
 3.1.4
 -
 jar
 http://www.opensource.org/licenses/bsd-license.php;>BSD-style
-
+
 org.scalatest
 http://www.scalatest.org;>scalatest_2.10
 2.2.4
@@ -800,294 +813,288 @@
 jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
-org.apache.hbase
-http://hbase.apache.org/hbase-build-configuration/hbase-zookeeper;>hbase-zookeeper
-3.0.0-SNAPSHOT
-jar
-https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
-
 org.apache.hbase.thirdparty
 http://hbase.apache.org/hbase-shaded-netty;>hbase-shaded-netty
 1.0.1
 jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
-
+
 org.apache.hbase.thirdparty
 http://hbase.apache.org/hbase-shaded-protobuf;>hbase-shaded-protobuf
 1.0.1
 jar
 https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
-
+
 org.apache.htrace
 http://incubator.apache.org/projects/htrace.html;>htrace-core
 3.2.0-incubating
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 org.apache.htrace
 http://incubator.apache.org/projects/htrace.html;>htrace-core4
 4.2.0-incubating
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
-
+
 org.apache.httpcomponents
 http://hc.apache.org/httpcomponents-client;>httpclient
 4.5.3
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
-
+
 org.apache.httpcomponents
 http://hc.apache.org/httpcomponents-core-ga;>httpcore
 4.4.6
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
-
+
 org.apache.zookeeper
 zookeeper
 3.4.10
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 org.codehaus.jackson
 http://jackson.codehaus.org;>jackson-core-asl
 1.9.13
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 org.codehaus.jackson
 http://jackson.codehaus.org;>jackson-jaxrs
 1.8.3
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0-http://www.fsf.org/licensing/licenses/lgpl.txt;>GNU Lesser General Public 
License (LGPL), Version 2.1
-
+
 org.codehaus.jackson
 http://jackson.codehaus.org;>jackson-mapper-asl
 1.9.13
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 org.codehaus.jackson
 http://jackson.codehaus.org;>jackson-xc
 1.8.3
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0-http://www.fsf.org/licensing/licenses/lgpl.txt;>GNU Lesser General Public 
License (LGPL), Version 2.1
-
+
 org.codehaus.jettison
 https://github.com/jettison-json/jettison;>jettison
 1.3.8
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache License, Version 
2.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-http
 9.3.19.v20170502
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 2.0-http://www.eclipse.org/org/documents/epl-v10.php;>Eclipse Public License 
- Version 1.0
-
+
 org.eclipse.jetty
 http://www.eclipse.org/jetty;>jetty-io
 9.3.19.v20170502
 jar
 http://www.apache.org/licenses/LICENSE-2.0;>Apache Software License - 
Version 

[05/51] [partial] hbase-site git commit: Published site at .

2017-12-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/d171b896/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
index 9dfdaa4..9cb6590 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
Reactor Dependency Convergence
 
@@ -488,22 +488,22 @@
 3.4.10
 
 
-org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.zookeeper:zookeeper:jar:3.4.10:compile|+-org.apache.hadoop:hadoop-common:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.had
 oop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-mapreduce:test-jar:tests:3.0.0-SNAPSHOT:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-testing-util:jar:3.0.0-SNAP
 SHOT:test|+-org.apache.hbase:hbase-server:test-jar:tests:3.0.0-SNAPSHOT:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-minicluster:jar:2.7.4:test|+-org.apache.hadoop:hadoop-common:test-jar:tests:2.7.4:test||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-yarn-server-tests:test-jar:tests:2.7.4:test|\-org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:2.7.4:test|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:test
 - version managed from 3.4.6; omitted for dupli
 cate)+-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)\-org.apache.hbase:hbase-rsgroup:jar:3.0.0-SNAPSHOT:compile\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile 
- version managed from 3.4.6
 ; omitted for duplicate)
-org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT+-org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT:compile|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-auth:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)+-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile|+-org.apache.hbase:hbase-zookeeper:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for duplicate)|+-org.apache.hbase:hbase-replication:jar:3.0.0-SNAPSHOT:compile||\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for dup
 licate)|+-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile - 
version managed from 3.4.6; omitted for duplicate)|\-org.apache.hadoop:hadoop-client:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-app:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-mapreduce-client-common:jar:2.7.4:compile|\-org.apache.hadoop:hadoop-yarn-server-common:jar:2.7.4:compile|\-(org.apache.zookeeper:zookeeper:jar:3.4.10:compile
 - version managed from 3.4.6; omitted for