[4/6] hbase git commit: HBASE-19547 HBase fails building on AArch64 due to asciidoctor-maven-plugin

2018-04-20 Thread busbey
HBASE-19547 HBase fails building on AArch64 due to asciidoctor-maven-plugin

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/51069e6e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/51069e6e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/51069e6e

Branch: refs/heads/branch-2.0
Commit: 51069e6e83e58902ca9c3ebb957b80f65627a924
Parents: 0b4f3d1
Author: Yuqi Gu 
Authored: Mon Dec 18 09:13:38 2017 +
Committer: Sean Busbey 
Committed: Fri Apr 20 22:45:10 2018 -0500

--
 pom.xml | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/51069e6e/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 7c949dd..983dc3c 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1153,6 +1153,11 @@
             <artifactId>asciidoctorj-pdf</artifactId>
             <version>${asciidoctorj.pdf.version}</version>
           </dependency>
+          <dependency>
+            <groupId>org.jruby</groupId>
+            <artifactId>jruby-complete</artifactId>
+            <version>${jruby.version}</version>
+          </dependency>
 
 
   
${project.reporting.outputDirectory}/



[3/6] hbase git commit: HBASE-19547 HBase fails building on AArch64 due to asciidoctor-maven-plugin

2018-04-20 Thread busbey
HBASE-19547 HBase fails building on AArch64 due to asciidoctor-maven-plugin

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cb1aaa68
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cb1aaa68
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cb1aaa68

Branch: refs/heads/branch-2
Commit: cb1aaa683b4fd6cc024e55eb6d1929b06b57702c
Parents: ddf8b2a
Author: Yuqi Gu 
Authored: Mon Dec 18 09:13:38 2017 +
Committer: Sean Busbey 
Committed: Fri Apr 20 22:44:24 2018 -0500

--
 pom.xml | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cb1aaa68/pom.xml
--
diff --git a/pom.xml b/pom.xml
index c8bc1f4..7573d2b 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1154,6 +1154,11 @@
             <artifactId>asciidoctorj-pdf</artifactId>
             <version>${asciidoctorj.pdf.version}</version>
           </dependency>
+          <dependency>
+            <groupId>org.jruby</groupId>
+            <artifactId>jruby-complete</artifactId>
+            <version>${jruby.version}</version>
+          </dependency>
 
 
   
${project.reporting.outputDirectory}/



[6/6] hbase git commit: HBASE-20406 HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods

2018-04-20 Thread busbey
HBASE-20406 HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods

Signed-off-by: Josh Elser 
Signed-off-by: Ted Yu 
Signed-off-by: Sean Busbey 

 Conflicts:

hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java

hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e6018903
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e6018903
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e6018903

Branch: refs/heads/branch-1
Commit: e60189035e1974226f6176be52f29dff79a1fb18
Parents: eacf3cb
Author: Kevin Risden 
Authored: Thu Apr 12 21:08:15 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 22:45:49 2018 -0500

--
 .../hadoop/hbase/http/TestHttpServer.java   | 13 ++--
 .../hadoop/hbase/thrift/ThriftServerRunner.java |  2 ++
 .../hbase/thrift/TestThriftHttpServer.java  | 21 
 3 files changed, 30 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e6018903/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index cec3fd1..2cb6cb4 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -608,8 +608,6 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 myServer.stop();
   }
 
-
-
   @Test
   public void testNoCacheHeader() throws Exception {
 URL url = new URL(baseUrl, "/echo?a=b&c=d");
@@ -634,4 +632,15 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 .build();
 s.stop();
   }
+
+  @Test
+  public void testHttpMethods() throws Exception {
+// HTTP TRACE method should be disabled for security
+// See https://www.owasp.org/index.php/Cross_Site_Tracing
+URL url = new URL(baseUrl, "/echo?a=b");
+HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+conn.setRequestMethod("TRACE");
+conn.connect();
+assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
+  }
 }
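
The test above probes TRACE only, while the JIRA title also calls out OPTIONS. As an
illustration only (not part of the committed diff), the same probe pointed at OPTIONS
against an endpoint where constrainHttpMethods(...) has been applied -- see the
ThriftServerRunner change below -- would be expected to be refused the same way;
THRIFT_HTTP_BASE is a hypothetical placeholder for such an endpoint.

import static org.junit.Assert.assertEquals;

import java.net.HttpURLConnection;
import java.net.URL;
import org.junit.Test;

public class TestOptionsMethodRefused {
  // Hypothetical base URL of a Thrift HTTP server started with method constraints.
  private static final String THRIFT_HTTP_BASE = "http://localhost:9090";

  @Test
  public void optionsIsRefused() throws Exception {
    HttpURLConnection conn =
        (HttpURLConnection) new URL(THRIFT_HTTP_BASE + "/").openConnection();
    conn.setRequestMethod("OPTIONS");
    conn.connect();
    // With TRACE/OPTIONS constrained, the server answers 403 instead of handling the call.
    assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
  }
}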

http://git-wip-us.apache.org/repos/asf/hbase/blob/e6018903/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 07c18a7..8292e91 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -100,6 +100,7 @@ import org.apache.hadoop.hbase.thrift.generated.TScan;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ConnectionCache;
 import org.apache.hadoop.hbase.util.DNS;
+import org.apache.hadoop.hbase.util.HttpServerUtil;
 import org.apache.hadoop.hbase.util.JvmPauseMonitor;
 import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
@@ -422,6 +423,7 @@ public class ThriftServerRunner implements Runnable {
 String httpPath = "/*";
 httpServer.setHandler(context);
 context.addServlet(new ServletHolder(thriftHttpServlet), httpPath);
+HttpServerUtil.constrainHttpMethods(context);
 
 // set up Jetty and run the embedded server
 Connector connector = new SelectChannelConnector();

http://git-wip-us.apache.org/repos/asf/hbase/blob/e6018903/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
--
diff --git 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
index cf14e87..ed91a29 100644
--- 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
+++ 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
@@ -22,6 +22,8 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.fail;
 
+import java.net.HttpURLConnection;
+import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -40,6 +42,7 @@ import 

[2/6] hbase git commit: HBASE-20406 HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods

2018-04-20 Thread busbey
HBASE-20406 HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods

Signed-off-by: Josh Elser 
Signed-off-by: Ted Yu 
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ddf8b2a2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ddf8b2a2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ddf8b2a2

Branch: refs/heads/branch-2
Commit: ddf8b2a2c43c3da3b3187b2e9b9ebd003ec8b441
Parents: 1546613
Author: Kevin Risden 
Authored: Thu Apr 12 21:08:15 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 22:44:01 2018 -0500

--
 .../hadoop/hbase/http/TestHttpServer.java   | 13 ++--
 .../hadoop/hbase/thrift/ThriftServerRunner.java |  2 ++
 .../hbase/thrift/TestThriftHttpServer.java  | 21 
 3 files changed, 30 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ddf8b2a2/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
--
diff --git 
a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java 
b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index 16350d5..10553da 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -605,8 +605,6 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 myServer.stop();
   }
 
-
-
   @Test
   public void testNoCacheHeader() throws Exception {
 URL url = new URL(baseUrl, "/echo?a=b&c=d");
@@ -619,4 +617,15 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 assertEquals(conn.getHeaderField("Expires"), conn.getHeaderField("Date"));
 assertEquals("DENY", conn.getHeaderField("X-Frame-Options"));
   }
+
+  @Test
+  public void testHttpMethods() throws Exception {
+// HTTP TRACE method should be disabled for security
+// See https://www.owasp.org/index.php/Cross_Site_Tracing
+URL url = new URL(baseUrl, "/echo?a=b");
+HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+conn.setRequestMethod("TRACE");
+conn.connect();
+assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ddf8b2a2/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 16894ad..5d887f9 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -79,6 +79,7 @@ import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.ParseFilter;
 import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.http.HttpServerUtil;
 import org.apache.hadoop.hbase.log.HBaseMarkers;
 import org.apache.hadoop.hbase.security.SaslUtil;
 import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
@@ -445,6 +446,7 @@ public class ThriftServerRunner implements Runnable {
 // Context handler
 ServletContextHandler ctxHandler = new ServletContextHandler(httpServer, 
"/", ServletContextHandler.SESSIONS);
 ctxHandler.addServlet(new ServletHolder(thriftHttpServlet), "/*");
+HttpServerUtil.constrainHttpMethods(ctxHandler);
 
 // set up Jetty and run the embedded server
 HttpConfiguration httpConfig = new HttpConfiguration();

http://git-wip-us.apache.org/repos/asf/hbase/blob/ddf8b2a2/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
--
diff --git 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
index bd156bc..c3fecf6 100644
--- 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
+++ 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
@@ -21,6 +21,8 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.fail;
 
+import java.net.HttpURLConnection;
+import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
 import 

[5/6] hbase git commit: Revert "HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods"

2018-04-20 Thread busbey
Revert "HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods"

This reverts commit fe84833ea22c30b68022203132706ebb1e526852.

missing jira key


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eacf3cb2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eacf3cb2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eacf3cb2

Branch: refs/heads/branch-1
Commit: eacf3cb29641af1a68978d9bd7654f643a3aa3a1
Parents: fe84833
Author: Sean Busbey 
Authored: Fri Apr 20 22:45:37 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 22:45:37 2018 -0500

--
 .../hadoop/hbase/http/TestHttpServer.java   | 13 ++--
 .../hadoop/hbase/thrift/ThriftServerRunner.java |  2 --
 .../hbase/thrift/TestThriftHttpServer.java  | 21 
 3 files changed, 6 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eacf3cb2/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index 2cb6cb4..cec3fd1 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -608,6 +608,8 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 myServer.stop();
   }
 
+
+
   @Test
   public void testNoCacheHeader() throws Exception {
 URL url = new URL(baseUrl, "/echo?a=b&c=d");
@@ -632,15 +634,4 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 .build();
 s.stop();
   }
-
-  @Test
-  public void testHttpMethods() throws Exception {
-// HTTP TRACE method should be disabled for security
-// See https://www.owasp.org/index.php/Cross_Site_Tracing
-URL url = new URL(baseUrl, "/echo?a=b");
-HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-conn.setRequestMethod("TRACE");
-conn.connect();
-assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
-  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/eacf3cb2/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 8292e91..07c18a7 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -100,7 +100,6 @@ import org.apache.hadoop.hbase.thrift.generated.TScan;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ConnectionCache;
 import org.apache.hadoop.hbase.util.DNS;
-import org.apache.hadoop.hbase.util.HttpServerUtil;
 import org.apache.hadoop.hbase.util.JvmPauseMonitor;
 import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
@@ -423,7 +422,6 @@ public class ThriftServerRunner implements Runnable {
 String httpPath = "/*";
 httpServer.setHandler(context);
 context.addServlet(new ServletHolder(thriftHttpServlet), httpPath);
-HttpServerUtil.constrainHttpMethods(context);
 
 // set up Jetty and run the embedded server
 Connector connector = new SelectChannelConnector();

http://git-wip-us.apache.org/repos/asf/hbase/blob/eacf3cb2/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
--
diff --git 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
index ed91a29..cf14e87 100644
--- 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
+++ 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
@@ -22,8 +22,6 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.fail;
 
-import java.net.HttpURLConnection;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -42,7 +40,6 @@ import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.THttpClient;
 import org.apache.thrift.transport.TTransportException;
 import org.junit.AfterClass;
-import org.junit.Assert;
 import org.junit.BeforeClass;
 import 

[1/6] hbase git commit: Revert "HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods"

2018-04-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1 fe84833ea -> e60189035
  refs/heads/branch-2 298ce9624 -> cb1aaa683
  refs/heads/branch-2.0 0b4f3d1f5 -> 51069e6e8


Revert "HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods"

This reverts commit 05f8e94191ef6a63baadf56d6114d7d0317796f2.

missing jira key


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1546613e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1546613e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1546613e

Branch: refs/heads/branch-2
Commit: 1546613e76b1013a08ebc179c2c22bfeb44f3a4a
Parents: 298ce96
Author: Sean Busbey 
Authored: Fri Apr 20 22:43:25 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 22:43:56 2018 -0500

--
 .../hadoop/hbase/http/TestHttpServer.java   | 13 ++--
 .../hadoop/hbase/thrift/ThriftServerRunner.java |  2 --
 .../hbase/thrift/TestThriftHttpServer.java  | 21 
 3 files changed, 6 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1546613e/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
--
diff --git 
a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java 
b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index 10553da..16350d5 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -605,6 +605,8 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 myServer.stop();
   }
 
+
+
   @Test
   public void testNoCacheHeader() throws Exception {
 URL url = new URL(baseUrl, "/echo?a=b&c=d");
@@ -617,15 +619,4 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 assertEquals(conn.getHeaderField("Expires"), conn.getHeaderField("Date"));
 assertEquals("DENY", conn.getHeaderField("X-Frame-Options"));
   }
-
-  @Test
-  public void testHttpMethods() throws Exception {
-// HTTP TRACE method should be disabled for security
-// See https://www.owasp.org/index.php/Cross_Site_Tracing
-URL url = new URL(baseUrl, "/echo?a=b");
-HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-conn.setRequestMethod("TRACE");
-conn.connect();
-assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
-  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/1546613e/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 5d887f9..16894ad 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -79,7 +79,6 @@ import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.ParseFilter;
 import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-import org.apache.hadoop.hbase.http.HttpServerUtil;
 import org.apache.hadoop.hbase.log.HBaseMarkers;
 import org.apache.hadoop.hbase.security.SaslUtil;
 import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
@@ -446,7 +445,6 @@ public class ThriftServerRunner implements Runnable {
 // Context handler
 ServletContextHandler ctxHandler = new ServletContextHandler(httpServer, 
"/", ServletContextHandler.SESSIONS);
 ctxHandler.addServlet(new ServletHolder(thriftHttpServlet), "/*");
-HttpServerUtil.constrainHttpMethods(ctxHandler);
 
 // set up Jetty and run the embedded server
 HttpConfiguration httpConfig = new HttpConfiguration();

http://git-wip-us.apache.org/repos/asf/hbase/blob/1546613e/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
--
diff --git 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
index c3fecf6..bd156bc 100644
--- 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
+++ 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
@@ -21,8 +21,6 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.fail;
 
-import 

[1/3] hbase git commit: HBASE-19547 HBase fails building on AArch64 due to asciidoctor-maven-plugin

2018-04-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/master e22f9e2d9 -> 46cb5dfa2


HBASE-19547 HBase fails building on AArch64 due to asciidoctor-maven-plugin

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e5fb3325
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e5fb3325
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e5fb3325

Branch: refs/heads/master
Commit: e5fb33259287d7b9c1b24f6adee28159b49ddeed
Parents: e22f9e2
Author: Yuqi Gu 
Authored: Mon Dec 18 09:13:38 2017 +
Committer: Sean Busbey 
Committed: Fri Apr 20 22:41:26 2018 -0500

--
 pom.xml | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e5fb3325/pom.xml
--
diff --git a/pom.xml b/pom.xml
index f8f1150..7aede4a 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1261,6 +1261,11 @@
             <artifactId>asciidoctorj-pdf</artifactId>
             <version>${asciidoctorj.pdf.version}</version>
           </dependency>
+          <dependency>
+            <groupId>org.jruby</groupId>
+            <artifactId>jruby-complete</artifactId>
+            <version>${jruby.version}</version>
+          </dependency>
 
 
   
${project.reporting.outputDirectory}/



[2/3] hbase git commit: Revert "HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods"

2018-04-20 Thread busbey
Revert "HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods"

This reverts commit 273d252838e96c4b4af2401743d84e482c4ec565.

missing jira id


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/eb3f5b28
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/eb3f5b28
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/eb3f5b28

Branch: refs/heads/master
Commit: eb3f5b2812cfe030690d5d22755f7809566d31a6
Parents: e5fb332
Author: Sean Busbey 
Authored: Fri Apr 20 22:41:50 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 22:41:50 2018 -0500

--
 .../hadoop/hbase/http/TestHttpServer.java   | 13 ++--
 .../hadoop/hbase/thrift/ThriftServerRunner.java |  2 --
 .../hbase/thrift/TestThriftHttpServer.java  | 21 
 3 files changed, 6 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/eb3f5b28/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
--
diff --git 
a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java 
b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index 10553da..16350d5 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -605,6 +605,8 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 myServer.stop();
   }
 
+
+
   @Test
   public void testNoCacheHeader() throws Exception {
 URL url = new URL(baseUrl, "/echo?a=b&c=d");
@@ -617,15 +619,4 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 assertEquals(conn.getHeaderField("Expires"), conn.getHeaderField("Date"));
 assertEquals("DENY", conn.getHeaderField("X-Frame-Options"));
   }
-
-  @Test
-  public void testHttpMethods() throws Exception {
-// HTTP TRACE method should be disabled for security
-// See https://www.owasp.org/index.php/Cross_Site_Tracing
-URL url = new URL(baseUrl, "/echo?a=b");
-HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-conn.setRequestMethod("TRACE");
-conn.connect();
-assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
-  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/eb3f5b28/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 28ba28a..39ea259 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -79,7 +79,6 @@ import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.ParseFilter;
 import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-import org.apache.hadoop.hbase.http.HttpServerUtil;
 import org.apache.hadoop.hbase.log.HBaseMarkers;
 import org.apache.hadoop.hbase.security.SaslUtil;
 import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
@@ -449,7 +448,6 @@ public class ThriftServerRunner implements Runnable {
 ServletContextHandler ctxHandler = new ServletContextHandler(httpServer, 
"/",
 ServletContextHandler.SESSIONS);
 ctxHandler.addServlet(new ServletHolder(thriftHttpServlet), "/*");
-HttpServerUtil.constrainHttpMethods(ctxHandler);
 
 // set up Jetty and run the embedded server
 HttpConfiguration httpConfig = new HttpConfiguration();

http://git-wip-us.apache.org/repos/asf/hbase/blob/eb3f5b28/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
--
diff --git 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
index 6117953..d583234 100644
--- 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
+++ 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
@@ -21,8 +21,6 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.fail;
 
-import java.net.HttpURLConnection;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.hadoop.conf.Configuration;
@@ -40,7 +38,6 @@ import 

[3/3] hbase git commit: HBASE-20406 HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods

2018-04-20 Thread busbey
HBASE-20406 HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods

Signed-off-by: Josh Elser 
Signed-off-by: Ted Yu 
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/46cb5dfa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/46cb5dfa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/46cb5dfa

Branch: refs/heads/master
Commit: 46cb5dfa226892fd2580f26ce9ce77225bd7e67c
Parents: eb3f5b2
Author: Kevin Risden 
Authored: Thu Apr 12 21:08:15 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 22:42:03 2018 -0500

--
 .../hadoop/hbase/http/TestHttpServer.java   | 13 ++--
 .../hadoop/hbase/thrift/ThriftServerRunner.java |  2 ++
 .../hbase/thrift/TestThriftHttpServer.java  | 21 
 3 files changed, 30 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/46cb5dfa/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
--
diff --git 
a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java 
b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index 16350d5..10553da 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -605,8 +605,6 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 myServer.stop();
   }
 
-
-
   @Test
   public void testNoCacheHeader() throws Exception {
 URL url = new URL(baseUrl, "/echo?a=b&c=d");
@@ -619,4 +617,15 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 assertEquals(conn.getHeaderField("Expires"), conn.getHeaderField("Date"));
 assertEquals("DENY", conn.getHeaderField("X-Frame-Options"));
   }
+
+  @Test
+  public void testHttpMethods() throws Exception {
+// HTTP TRACE method should be disabled for security
+// See https://www.owasp.org/index.php/Cross_Site_Tracing
+URL url = new URL(baseUrl, "/echo?a=b");
+HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+conn.setRequestMethod("TRACE");
+conn.connect();
+assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/46cb5dfa/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 39ea259..28ba28a 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -79,6 +79,7 @@ import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.ParseFilter;
 import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.http.HttpServerUtil;
 import org.apache.hadoop.hbase.log.HBaseMarkers;
 import org.apache.hadoop.hbase.security.SaslUtil;
 import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
@@ -448,6 +449,7 @@ public class ThriftServerRunner implements Runnable {
 ServletContextHandler ctxHandler = new ServletContextHandler(httpServer, 
"/",
 ServletContextHandler.SESSIONS);
 ctxHandler.addServlet(new ServletHolder(thriftHttpServlet), "/*");
+HttpServerUtil.constrainHttpMethods(ctxHandler);
 
 // set up Jetty and run the embedded server
 HttpConfiguration httpConfig = new HttpConfiguration();
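
For context on the one-line HttpServerUtil.constrainHttpMethods(ctxHandler) call added
above: below is a minimal, self-contained sketch of the general Jetty 9 technique for
refusing TRACE and OPTIONS on a ServletContextHandler via a security constraint. It is
an illustration under that assumption, not a copy of HBase's helper; everything except
the Jetty API names is made up for the sketch.

import org.eclipse.jetty.security.ConstraintMapping;
import org.eclipse.jetty.security.ConstraintSecurityHandler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.util.security.Constraint;

// Sketch only: map TRACE and OPTIONS on every path to a constraint that can never be
// satisfied (authenticate=true with no roles), so Jetty answers 403 Forbidden -- which
// is what the test above expects.
public final class DisallowTraceOptions {
  private DisallowTraceOptions() {}

  public static void apply(ServletContextHandler context) {
    ConstraintSecurityHandler security = new ConstraintSecurityHandler();
    for (String method : new String[] { "TRACE", "OPTIONS" }) {
      Constraint constraint = new Constraint();
      constraint.setAuthenticate(true);   // no roles are granted, so the check always fails
      ConstraintMapping mapping = new ConstraintMapping();
      mapping.setConstraint(constraint);
      mapping.setMethod(method);
      mapping.setPathSpec("/*");
      security.addConstraintMapping(mapping);
    }
    context.setSecurityHandler(security);
  }
}

Applying the constraint on the context right after the Thrift servlet is registered, as
the patch does, leaves the servlet itself untouched.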

http://git-wip-us.apache.org/repos/asf/hbase/blob/46cb5dfa/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
--
diff --git 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
index d583234..6117953 100644
--- 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
+++ 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
@@ -21,6 +21,8 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.fail;
 
+import java.net.HttpURLConnection;
+import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
 import 

hbase git commit: HBASE-19924 hbase rpc throttling does not work for multi() with request count rater.

2018-04-20 Thread huaxiangsun
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 19b0349a3 -> 0b4f3d1f5


HBASE-19924 hbase rpc throttling does not work for multi() with request count 
rater.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0b4f3d1f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0b4f3d1f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0b4f3d1f

Branch: refs/heads/branch-2.0
Commit: 0b4f3d1f51dfec006abc9fcdb2efa2c117c8b501
Parents: 19b0349
Author: Huaxiang Sun 
Authored: Fri Apr 20 16:54:03 2018 -0700
Committer: Huaxiang Sun 
Committed: Fri Apr 20 17:00:37 2018 -0700

--
 .../hbase/quotas/DefaultOperationQuota.java |  4 +--
 .../hadoop/hbase/quotas/NoopQuotaLimiter.java   |  6 ++--
 .../hadoop/hbase/quotas/QuotaLimiter.java   | 12 ---
 .../hadoop/hbase/quotas/TimeBasedLimiter.java   | 37 ++-
 .../hadoop/hbase/quotas/TestQuotaState.java | 38 +---
 5 files changed, 66 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0b4f3d1f/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
index 80b39a8..1265a42 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
@@ -69,13 +69,13 @@ public class DefaultOperationQuota implements 
OperationQuota {
 for (final QuotaLimiter limiter: limiters) {
   if (limiter.isBypass()) continue;
 
-  limiter.checkQuota(writeConsumed, readConsumed);
+  limiter.checkQuota(numWrites, writeConsumed, numReads + numScans, 
readConsumed);
   readAvailable = Math.min(readAvailable, limiter.getReadAvailable());
   writeAvailable = Math.min(writeAvailable, limiter.getWriteAvailable());
 }
 
 for (final QuotaLimiter limiter: limiters) {
-  limiter.grabQuota(writeConsumed, readConsumed);
+  limiter.grabQuota(numWrites, writeConsumed, numReads + numScans, 
readConsumed);
 }
   }
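
The expanded calls above pass request counts (numWrites, numReads + numScans) alongside
the estimated byte sizes, so a multi() made of many small operations is throttled by its
request count and not only by the bytes it moves. A minimal, self-contained sketch of a
limiter that honors both budgets follows; it is illustrative only (the class and exception
used here are assumptions, not HBase's TimeBasedLimiter):

// Illustrative only -- not HBase's TimeBasedLimiter. Shows why checkQuota/grabQuota now
// carry request counts in addition to sizes: either budget can refuse the operation.
public class CountAndSizeLimiter {
  private long writeReqsAvailable, readReqsAvailable;
  private long writeBytesAvailable, readBytesAvailable;

  public CountAndSizeLimiter(long writeReqs, long writeBytes, long readReqs, long readBytes) {
    this.writeReqsAvailable = writeReqs;
    this.writeBytesAvailable = writeBytes;
    this.readReqsAvailable = readReqs;
    this.readBytesAvailable = readBytes;
  }

  /** Refuses the operation if either the request-count or the size budget falls short. */
  public void checkQuota(long writeReqs, long estimateWriteSize,
      long readReqs, long estimateReadSize) {
    if (writeReqs > writeReqsAvailable || estimateWriteSize > writeBytesAvailable
        || readReqs > readReqsAvailable || estimateReadSize > readBytesAvailable) {
      // HBase throws RpcThrottlingException here; a plain exception keeps the sketch standalone.
      throw new IllegalStateException("quota exceeded");
    }
  }

  /** Deducts both counts and sizes once the operation has been admitted. */
  public void grabQuota(long writeReqs, long writeSize, long readReqs, long readSize) {
    writeReqsAvailable -= writeReqs;
    writeBytesAvailable -= writeSize;
    readReqsAvailable -= readReqs;
    readBytesAvailable -= readSize;
  }
}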
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/0b4f3d1f/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
index acfdc52..3cca955 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
@@ -35,13 +35,13 @@ class NoopQuotaLimiter implements QuotaLimiter {
   }
 
   @Override
-  public void checkQuota(long estimateWriteSize, long estimateReadSize)
-  throws RpcThrottlingException {
+  public void checkQuota(long writeReqs, long estimateWriteSize, long readReqs,
+  long estimateReadSize) throws RpcThrottlingException {
 // no-op
   }
 
   @Override
-  public void grabQuota(long writeSize, long readSize) {
+  public void grabQuota(long writeReqs, long writeSize, long readReqs, long 
readSize) {
 // no-op
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/0b4f3d1f/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java
index 1144aec..7cb29b3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java
@@ -31,22 +31,26 @@ public interface QuotaLimiter {
   /**
* Checks if it is possible to execute the specified operation.
*
+   * @param writeReqs the write requests that will be checked against the 
available quota
* @param estimateWriteSize the write size that will be checked against the 
available quota
+   * @param readReqs the read requests that will be checked against the 
available quota
* @param estimateReadSize the read size that will be checked against the 
available quota
-   * @throws RpcThrottlingException thrown if not enough avialable resources 
to perform operation.
+   * @throws RpcThrottlingException thrown if not enough available resources 
to perform operation.
*/
-  void checkQuota(long estimateWriteSize, 

hbase git commit: HBASE-19924 hbase rpc throttling does not work for multi() with request count rater.

2018-04-20 Thread huaxiangsun
Repository: hbase
Updated Branches:
  refs/heads/branch-2 05f8e9419 -> 298ce9624


HBASE-19924 hbase rpc throttling does not work for multi() with request count 
rater.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/298ce962
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/298ce962
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/298ce962

Branch: refs/heads/branch-2
Commit: 298ce962460b575544f922795579488a3451c904
Parents: 05f8e94
Author: Huaxiang Sun 
Authored: Fri Apr 20 16:54:03 2018 -0700
Committer: Huaxiang Sun 
Committed: Fri Apr 20 16:55:23 2018 -0700

--
 .../hbase/quotas/DefaultOperationQuota.java |  4 +--
 .../hadoop/hbase/quotas/NoopQuotaLimiter.java   |  6 ++--
 .../hadoop/hbase/quotas/QuotaLimiter.java   | 12 ---
 .../hadoop/hbase/quotas/TimeBasedLimiter.java   | 37 ++-
 .../hadoop/hbase/quotas/TestQuotaState.java | 38 +---
 5 files changed, 66 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/298ce962/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
index 80b39a8..1265a42 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
@@ -69,13 +69,13 @@ public class DefaultOperationQuota implements 
OperationQuota {
 for (final QuotaLimiter limiter: limiters) {
   if (limiter.isBypass()) continue;
 
-  limiter.checkQuota(writeConsumed, readConsumed);
+  limiter.checkQuota(numWrites, writeConsumed, numReads + numScans, 
readConsumed);
   readAvailable = Math.min(readAvailable, limiter.getReadAvailable());
   writeAvailable = Math.min(writeAvailable, limiter.getWriteAvailable());
 }
 
 for (final QuotaLimiter limiter: limiters) {
-  limiter.grabQuota(writeConsumed, readConsumed);
+  limiter.grabQuota(numWrites, writeConsumed, numReads + numScans, 
readConsumed);
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/298ce962/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
index acfdc52..3cca955 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
@@ -35,13 +35,13 @@ class NoopQuotaLimiter implements QuotaLimiter {
   }
 
   @Override
-  public void checkQuota(long estimateWriteSize, long estimateReadSize)
-  throws RpcThrottlingException {
+  public void checkQuota(long writeReqs, long estimateWriteSize, long readReqs,
+  long estimateReadSize) throws RpcThrottlingException {
 // no-op
   }
 
   @Override
-  public void grabQuota(long writeSize, long readSize) {
+  public void grabQuota(long writeReqs, long writeSize, long readReqs, long 
readSize) {
 // no-op
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/298ce962/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java
index 1144aec..7cb29b3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java
@@ -31,22 +31,26 @@ public interface QuotaLimiter {
   /**
* Checks if it is possible to execute the specified operation.
*
+   * @param writeReqs the write requests that will be checked against the 
available quota
* @param estimateWriteSize the write size that will be checked against the 
available quota
+   * @param readReqs the read requests that will be checked against the 
available quota
* @param estimateReadSize the read size that will be checked against the 
available quota
-   * @throws RpcThrottlingException thrown if not enough avialable resources 
to perform operation.
+   * @throws RpcThrottlingException thrown if not enough available resources 
to perform operation.
*/
-  void checkQuota(long estimateWriteSize, 

hbase git commit: HBASE-19924 hbase rpc throttling does not work for multi() with request count rater.

2018-04-20 Thread huaxiangsun
Repository: hbase
Updated Branches:
  refs/heads/master 758f4296a -> e22f9e2d9


HBASE-19924 hbase rpc throttling does not work for multi() with request count 
rater.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e22f9e2d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e22f9e2d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e22f9e2d

Branch: refs/heads/master
Commit: e22f9e2d9729cc17cef932c2e14defad288c7a2d
Parents: 758f429
Author: Huaxiang Sun 
Authored: Fri Apr 20 16:54:03 2018 -0700
Committer: Huaxiang Sun 
Committed: Fri Apr 20 16:54:03 2018 -0700

--
 .../hbase/quotas/DefaultOperationQuota.java |  4 +--
 .../hadoop/hbase/quotas/NoopQuotaLimiter.java   |  6 ++--
 .../hadoop/hbase/quotas/QuotaLimiter.java   | 12 ---
 .../hadoop/hbase/quotas/TimeBasedLimiter.java   | 37 ++-
 .../hadoop/hbase/quotas/TestQuotaState.java | 38 +---
 5 files changed, 66 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e22f9e2d/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
index 80b39a8..1265a42 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/DefaultOperationQuota.java
@@ -69,13 +69,13 @@ public class DefaultOperationQuota implements 
OperationQuota {
 for (final QuotaLimiter limiter: limiters) {
   if (limiter.isBypass()) continue;
 
-  limiter.checkQuota(writeConsumed, readConsumed);
+  limiter.checkQuota(numWrites, writeConsumed, numReads + numScans, 
readConsumed);
   readAvailable = Math.min(readAvailable, limiter.getReadAvailable());
   writeAvailable = Math.min(writeAvailable, limiter.getWriteAvailable());
 }
 
 for (final QuotaLimiter limiter: limiters) {
-  limiter.grabQuota(writeConsumed, readConsumed);
+  limiter.grabQuota(numWrites, writeConsumed, numReads + numScans, 
readConsumed);
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/e22f9e2d/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
index acfdc52..3cca955 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/NoopQuotaLimiter.java
@@ -35,13 +35,13 @@ class NoopQuotaLimiter implements QuotaLimiter {
   }
 
   @Override
-  public void checkQuota(long estimateWriteSize, long estimateReadSize)
-  throws RpcThrottlingException {
+  public void checkQuota(long writeReqs, long estimateWriteSize, long readReqs,
+  long estimateReadSize) throws RpcThrottlingException {
 // no-op
   }
 
   @Override
-  public void grabQuota(long writeSize, long readSize) {
+  public void grabQuota(long writeReqs, long writeSize, long readReqs, long 
readSize) {
 // no-op
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/e22f9e2d/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java
index 1144aec..7cb29b3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/QuotaLimiter.java
@@ -31,22 +31,26 @@ public interface QuotaLimiter {
   /**
* Checks if it is possible to execute the specified operation.
*
+   * @param writeReqs the write requests that will be checked against the 
available quota
* @param estimateWriteSize the write size that will be checked against the 
available quota
+   * @param readReqs the read requests that will be checked against the 
available quota
* @param estimateReadSize the read size that will be checked against the 
available quota
-   * @throws RpcThrottlingException thrown if not enough avialable resources 
to perform operation.
+   * @throws RpcThrottlingException thrown if not enough available resources 
to perform operation.
*/
-  void checkQuota(long estimateWriteSize, long 

hbase git commit: HBASE-20450 Provide metrics for number of total active, priority and replication rpc handlers

2018-04-20 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 273d25283 -> 758f4296a


HBASE-20450 Provide metrics for number of total active, priority and 
replication rpc handlers

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/758f4296
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/758f4296
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/758f4296

Branch: refs/heads/master
Commit: 758f4296a4a8c99e1b8409b0b0e9319559074bb9
Parents: 273d252
Author: Nihal Jain 
Authored: Thu Apr 19 14:05:13 2018 +0530
Committer: tedyu 
Committed: Fri Apr 20 16:24:32 2018 -0700

--
 .../hbase/ipc/MetricsHBaseServerSource.java |  8 ++-
 .../hbase/ipc/MetricsHBaseServerWrapper.java|  6 +
 .../hbase/ipc/MetricsHBaseServerSourceImpl.java |  8 +++
 .../hadoop/hbase/ipc/FifoRpcScheduler.java  | 16 -
 .../ipc/MetricsHBaseServerWrapperImpl.java  | 24 
 .../apache/hadoop/hbase/ipc/RpcScheduler.java   | 11 -
 .../hadoop/hbase/ipc/SimpleRpcScheduler.java| 20 +---
 .../hbase/ipc/DelegatingRpcScheduler.java   | 15 
 .../ipc/MetricsHBaseServerWrapperStub.java  | 15 
 .../apache/hadoop/hbase/ipc/TestRpcMetrics.java |  3 +++
 10 files changed, 120 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/758f4296/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
index 0833751..d98837f 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSource.java
@@ -76,7 +76,13 @@ public interface MetricsHBaseServerSource extends 
ExceptionTrackingSource {
   String NUM_OPEN_CONNECTIONS_NAME = "numOpenConnections";
   String NUM_OPEN_CONNECTIONS_DESC = "Number of open connections.";
   String NUM_ACTIVE_HANDLER_NAME = "numActiveHandler";
-  String NUM_ACTIVE_HANDLER_DESC = "Number of active rpc handlers.";
+  String NUM_ACTIVE_HANDLER_DESC = "Total number of active rpc handlers.";
+  String NUM_ACTIVE_GENERAL_HANDLER_NAME = "numActiveGeneralHandler";
+  String NUM_ACTIVE_GENERAL_HANDLER_DESC = "Number of active general rpc 
handlers.";
+  String NUM_ACTIVE_PRIORITY_HANDLER_NAME = "numActivePriorityHandler";
+  String NUM_ACTIVE_PRIORITY_HANDLER_DESC = "Number of active priority rpc 
handlers.";
+  String NUM_ACTIVE_REPLICATION_HANDLER_NAME = "numActiveReplicationHandler";
+  String NUM_ACTIVE_REPLICATION_HANDLER_DESC = "Number of active replication 
rpc handlers.";
   String NUM_ACTIVE_WRITE_HANDLER_NAME = "numActiveWriteHandler";
   String NUM_ACTIVE_WRITE_HANDLER_DESC = "Number of active write rpc 
handlers.";
   String NUM_ACTIVE_READ_HANDLER_NAME = "numActiveReadHandler";

http://git-wip-us.apache.org/repos/asf/hbase/blob/758f4296/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
index c80d1a9..c66ec59 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerWrapper.java
@@ -35,6 +35,12 @@ public interface MetricsHBaseServerWrapper {
 
   int getActiveRpcHandlerCount();
 
+  int getActiveGeneralRpcHandlerCount();
+
+  int getActivePriorityRpcHandlerCount();
+
+  int getActiveReplicationRpcHandlerCount();
+
   long getNumGeneralCallsDropped();
 
   long getNumLifoModeSwitches();
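
The wrapper interface above gains three per-pool gauges next to the existing total. A
hedged sketch of the shape of an implementation backing them -- the class and counters
below are hypothetical, not HBase's MetricsHBaseServerWrapperImpl, which derives these
values from the RPC scheduler:

import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical sketch: per-pool active-handler counters behind the new gauges.
public class ActiveHandlerGauges {
  private final AtomicInteger activeGeneral = new AtomicInteger();
  private final AtomicInteger activePriority = new AtomicInteger();
  private final AtomicInteger activeReplication = new AtomicInteger();

  public int getActiveGeneralRpcHandlerCount() { return activeGeneral.get(); }
  public int getActivePriorityRpcHandlerCount() { return activePriority.get(); }
  public int getActiveReplicationRpcHandlerCount() { return activeReplication.get(); }

  /** The pre-existing numActiveHandler gauge reports the total across the pools. */
  public int getActiveRpcHandlerCount() {
    return activeGeneral.get() + activePriority.get() + activeReplication.get();
  }
}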

http://git-wip-us.apache.org/repos/asf/hbase/blob/758f4296/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java
--
diff --git 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java
 
b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java
index ce8b1b4..6e8b81d 100644
--- 
a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceImpl.java
+++ 

[3/3] hbase git commit: HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods

2018-04-20 Thread busbey
HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods

Signed-off-by: Josh Elser 
Signed-off-by: Ted Yu 
Signed-off-by: Sean Busbey 

 Conflicts:

hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java

hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fe84833e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fe84833e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fe84833e

Branch: refs/heads/branch-1
Commit: fe84833ea22c30b68022203132706ebb1e526852
Parents: 59d9e0f
Author: Kevin Risden 
Authored: Thu Apr 12 21:08:15 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 15:15:31 2018 -0500

--
 .../hadoop/hbase/http/TestHttpServer.java   | 13 ++--
 .../hadoop/hbase/thrift/ThriftServerRunner.java |  2 ++
 .../hbase/thrift/TestThriftHttpServer.java  | 21 
 3 files changed, 30 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fe84833e/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index cec3fd1..2cb6cb4 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -608,8 +608,6 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 myServer.stop();
   }
 
-
-
   @Test
   public void testNoCacheHeader() throws Exception {
 URL url = new URL(baseUrl, "/echo?a=b&c=d");
@@ -634,4 +632,15 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 .build();
 s.stop();
   }
+
+  @Test
+  public void testHttpMethods() throws Exception {
+// HTTP TRACE method should be disabled for security
+// See https://www.owasp.org/index.php/Cross_Site_Tracing
+URL url = new URL(baseUrl, "/echo?a=b");
+HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+conn.setRequestMethod("TRACE");
+conn.connect();
+assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/fe84833e/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 07c18a7..8292e91 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -100,6 +100,7 @@ import org.apache.hadoop.hbase.thrift.generated.TScan;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ConnectionCache;
 import org.apache.hadoop.hbase.util.DNS;
+import org.apache.hadoop.hbase.util.HttpServerUtil;
 import org.apache.hadoop.hbase.util.JvmPauseMonitor;
 import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
@@ -422,6 +423,7 @@ public class ThriftServerRunner implements Runnable {
 String httpPath = "/*";
 httpServer.setHandler(context);
 context.addServlet(new ServletHolder(thriftHttpServlet), httpPath);
+HttpServerUtil.constrainHttpMethods(context);
 
 // set up Jetty and run the embedded server
 Connector connector = new SelectChannelConnector();

http://git-wip-us.apache.org/repos/asf/hbase/blob/fe84833e/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
--
diff --git 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
index cf14e87..ed91a29 100644
--- 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
+++ 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
@@ -22,6 +22,8 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.fail;
 
+import java.net.HttpURLConnection;
+import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -40,6 +42,7 @@ import 

[2/3] hbase git commit: HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods

2018-04-20 Thread busbey
HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods

Signed-off-by: Josh Elser 
Signed-off-by: Ted Yu 
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/05f8e941
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/05f8e941
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/05f8e941

Branch: refs/heads/branch-2
Commit: 05f8e94191ef6a63baadf56d6114d7d0317796f2
Parents: 00821bc
Author: Kevin Risden 
Authored: Thu Apr 12 21:08:15 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 14:57:11 2018 -0500

--
 .../hadoop/hbase/http/TestHttpServer.java   | 13 ++--
 .../hadoop/hbase/thrift/ThriftServerRunner.java |  2 ++
 .../hbase/thrift/TestThriftHttpServer.java  | 21 
 3 files changed, 30 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/05f8e941/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
--
diff --git 
a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java 
b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index 16350d5..10553da 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -605,8 +605,6 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 myServer.stop();
   }
 
-
-
   @Test
   public void testNoCacheHeader() throws Exception {
 URL url = new URL(baseUrl, "/echo?a=b&c=d");
@@ -619,4 +617,15 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 assertEquals(conn.getHeaderField("Expires"), conn.getHeaderField("Date"));
 assertEquals("DENY", conn.getHeaderField("X-Frame-Options"));
   }
+
+  @Test
+  public void testHttpMethods() throws Exception {
+// HTTP TRACE method should be disabled for security
+// See https://www.owasp.org/index.php/Cross_Site_Tracing
+URL url = new URL(baseUrl, "/echo?a=b");
+HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+conn.setRequestMethod("TRACE");
+conn.connect();
+assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/05f8e941/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 16894ad..5d887f9 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -79,6 +79,7 @@ import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.ParseFilter;
 import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.http.HttpServerUtil;
 import org.apache.hadoop.hbase.log.HBaseMarkers;
 import org.apache.hadoop.hbase.security.SaslUtil;
 import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
@@ -445,6 +446,7 @@ public class ThriftServerRunner implements Runnable {
 // Context handler
 ServletContextHandler ctxHandler = new ServletContextHandler(httpServer, 
"/", ServletContextHandler.SESSIONS);
 ctxHandler.addServlet(new ServletHolder(thriftHttpServlet), "/*");
+HttpServerUtil.constrainHttpMethods(ctxHandler);
 
 // set up Jetty and run the embedded server
 HttpConfiguration httpConfig = new HttpConfiguration();

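The ThriftServerRunner hunk above delegates to HttpServerUtil.constrainHttpMethods, whose body is not shown in this digest. Purely as a hedged sketch, assuming Jetty 9's standard ConstraintSecurityHandler API rather than the actual HBase helper, a servlet context can be made to refuse the TRACE and OPTIONS verbs on every path roughly like this:

  // Sketch only: uses stock Jetty 9 classes; not the committed HttpServerUtil code.
  import org.eclipse.jetty.security.ConstraintMapping;
  import org.eclipse.jetty.security.ConstraintSecurityHandler;
  import org.eclipse.jetty.servlet.ServletContextHandler;
  import org.eclipse.jetty.util.security.Constraint;

  public final class HttpMethodConstraintsSketch {
    private HttpMethodConstraintsSketch() {}

    // A constraint that authenticates against an empty role set, so Jetty answers 403.
    private static ConstraintMapping forbid(String method) {
      Constraint constraint = new Constraint();
      constraint.setAuthenticate(true);
      ConstraintMapping mapping = new ConstraintMapping();
      mapping.setConstraint(constraint);
      mapping.setMethod(method);
      mapping.setPathSpec("/*");
      return mapping;
    }

    public static void constrainHttpMethods(ServletContextHandler ctxHandler) {
      ConstraintSecurityHandler security = new ConstraintSecurityHandler();
      security.addConstraintMapping(forbid("TRACE"));
      security.addConstraintMapping(forbid("OPTIONS"));
      ctxHandler.setSecurityHandler(security);
    }
  }

With a handler constrained this way, the TRACE request in the test above comes back as HTTP 403 instead of being echoed.
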
http://git-wip-us.apache.org/repos/asf/hbase/blob/05f8e941/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
--
diff --git 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
index bd156bc..c3fecf6 100644
--- 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
+++ 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
@@ -21,6 +21,8 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.fail;
 
+import java.net.HttpURLConnection;
+import java.net.URL;
 import java.util.ArrayList;
 import java.util.List;
 import 

[1/3] hbase git commit: HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods

2018-04-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1 59d9e0f40 -> fe84833ea
  refs/heads/branch-2 00821bcc2 -> 05f8e9419
  refs/heads/master afb6d3ecc -> 273d25283


HBase Thrift HTTP - Shouldn't handle TRACE/OPTIONS methods

Signed-off-by: Josh Elser 
Signed-off-by: Ted Yu 
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/273d2528
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/273d2528
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/273d2528

Branch: refs/heads/master
Commit: 273d252838e96c4b4af2401743d84e482c4ec565
Parents: afb6d3e
Author: Kevin Risden 
Authored: Thu Apr 12 21:08:15 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 14:38:59 2018 -0500

--
 .../hadoop/hbase/http/TestHttpServer.java   | 13 ++--
 .../hadoop/hbase/thrift/ThriftServerRunner.java |  2 ++
 .../hbase/thrift/TestThriftHttpServer.java  | 21 
 3 files changed, 30 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/273d2528/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
--
diff --git 
a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java 
b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
index 16350d5..10553da 100644
--- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
+++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java
@@ -605,8 +605,6 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 myServer.stop();
   }
 
-
-
   @Test
   public void testNoCacheHeader() throws Exception {
 URL url = new URL(baseUrl, "/echo?a=b&c=d");
@@ -619,4 +617,15 @@ public class TestHttpServer extends 
HttpServerFunctionalTest {
 assertEquals(conn.getHeaderField("Expires"), conn.getHeaderField("Date"));
 assertEquals("DENY", conn.getHeaderField("X-Frame-Options"));
   }
+
+  @Test
+  public void testHttpMethods() throws Exception {
+// HTTP TRACE method should be disabled for security
+// See https://www.owasp.org/index.php/Cross_Site_Tracing
+URL url = new URL(baseUrl, "/echo?a=b");
+HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+conn.setRequestMethod("TRACE");
+conn.connect();
+assertEquals(HttpURLConnection.HTTP_FORBIDDEN, conn.getResponseCode());
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/273d2528/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 39ea259..28ba28a 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -79,6 +79,7 @@ import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.ParseFilter;
 import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.http.HttpServerUtil;
 import org.apache.hadoop.hbase.log.HBaseMarkers;
 import org.apache.hadoop.hbase.security.SaslUtil;
 import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
@@ -448,6 +449,7 @@ public class ThriftServerRunner implements Runnable {
 ServletContextHandler ctxHandler = new ServletContextHandler(httpServer, 
"/",
 ServletContextHandler.SESSIONS);
 ctxHandler.addServlet(new ServletHolder(thriftHttpServlet), "/*");
+HttpServerUtil.constrainHttpMethods(ctxHandler);
 
 // set up Jetty and run the embedded server
 HttpConfiguration httpConfig = new HttpConfiguration();

http://git-wip-us.apache.org/repos/asf/hbase/blob/273d2528/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
--
diff --git 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
index d583234..6117953 100644
--- 
a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
+++ 
b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
@@ -21,6 +21,8 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static 

[2/3] hbase git commit: HBASE-20441 Use checkstyle to ban imports from commons-lang 2

2018-04-20 Thread busbey
HBASE-20441 Use checkstyle to ban imports from commons-lang 2

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/00821bcc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/00821bcc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/00821bcc

Branch: refs/heads/branch-2
Commit: 00821bcc20cbc0e34ebb721dfb5dcc74c347d0c2
Parents: 767cf4e
Author: Balazs Meszaros 
Authored: Thu Apr 19 14:06:56 2018 +0200
Committer: Sean Busbey 
Committed: Fri Apr 20 13:53:24 2018 -0500

--
 hbase-checkstyle/src/main/resources/hbase/checkstyle.xml | 10 +-
 1 file changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/00821bcc/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
--
diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml 
b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
index b5c7c97..c77d46b9 100644
--- a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
+++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
@@ -77,7 +77,15 @@
   
 
 
-  
+  
 
 



[3/3] hbase git commit: HBASE-20441 Use checkstyle to ban imports from commons-lang 2

2018-04-20 Thread busbey
HBASE-20441 Use checkstyle to ban imports from commons-lang 2

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/19b0349a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/19b0349a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/19b0349a

Branch: refs/heads/branch-2.0
Commit: 19b0349a36db214b6c0e83f9eb56b2b0b2bd452a
Parents: 9820616
Author: Balazs Meszaros 
Authored: Thu Apr 19 14:06:56 2018 +0200
Committer: Sean Busbey 
Committed: Fri Apr 20 13:54:13 2018 -0500

--
 hbase-checkstyle/src/main/resources/hbase/checkstyle.xml | 10 +-
 1 file changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/19b0349a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
--
diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml 
b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
index b5c7c97..c77d46b9 100644
--- a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
+++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
@@ -77,7 +77,15 @@
   
 
 
-  
+  
 
 



[1/3] hbase git commit: HBASE-20441 Use checkstyle to ban imports from commons-lang 2

2018-04-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-2 767cf4e60 -> 00821bcc2
  refs/heads/branch-2.0 98206168d -> 19b0349a3
  refs/heads/master 90fe98ae9 -> afb6d3ecc


HBASE-20441 Use checkstyle to ban imports from commons-lang 2

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/afb6d3ec
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/afb6d3ec
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/afb6d3ec

Branch: refs/heads/master
Commit: afb6d3eccef449bad8a5108919c50bdc37db1664
Parents: 90fe98a
Author: Balazs Meszaros 
Authored: Thu Apr 19 14:06:56 2018 +0200
Committer: Sean Busbey 
Committed: Fri Apr 20 13:38:24 2018 -0500

--
 hbase-checkstyle/src/main/resources/hbase/checkstyle.xml | 10 +-
 1 file changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/afb6d3ec/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
--
diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml 
b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
index b5c7c97..c77d46b9 100644
--- a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
+++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
@@ -77,7 +77,15 @@
   
 
 
-  
+  
 
 

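The changed checkstyle module text is stripped out by the mail archive's HTML sanitizer, so the rule itself is not visible above. Purely as an illustration of what the ban targets, with assumed package names and not the committed checkstyle configuration, code should import commons-lang 3 rather than commons-lang 2:

  // Would be flagged by the new check (commons-lang 2):
  //   import org.apache.commons.lang.StringUtils;
  // Accepted replacement (commons-lang 3):
  import org.apache.commons.lang3.StringUtils;

  public class LangImportExample {
    public static void main(String[] args) {
      System.out.println(StringUtils.isBlank("   "));  // prints "true"
    }
  }
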


[2/3] hbase git commit: HBASE-20443 Use checkstyle to ban imports from commons-collections 3

2018-04-20 Thread busbey
HBASE-20443 Use checkstyle to ban imports from commons-collections 3

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/767cf4e6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/767cf4e6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/767cf4e6

Branch: refs/heads/branch-2
Commit: 767cf4e6055a820da5bbdb8d7d4e876f9ab28765
Parents: 8c7293c
Author: Balazs Meszaros 
Authored: Thu Apr 19 11:08:56 2018 +0200
Committer: Sean Busbey 
Committed: Fri Apr 20 12:34:39 2018 -0500

--
 hbase-checkstyle/src/main/resources/hbase/checkstyle.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/767cf4e6/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
--
diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml 
b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
index d0a15ad..b5c7c97 100644
--- a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
+++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
@@ -77,7 +77,7 @@
   
 
 
-  
+  
 
 



[3/3] hbase git commit: HBASE-20443 Use checkstyle to ban imports from commons-collections 3

2018-04-20 Thread busbey
HBASE-20443 Use checkstyle to ban imports from commons-collections 3

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/98206168
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/98206168
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/98206168

Branch: refs/heads/branch-2.0
Commit: 98206168d7eadb877882edbd011055b40345df50
Parents: 0207ed0
Author: Balazs Meszaros 
Authored: Thu Apr 19 11:08:56 2018 +0200
Committer: Sean Busbey 
Committed: Fri Apr 20 13:06:21 2018 -0500

--
 hbase-checkstyle/src/main/resources/hbase/checkstyle.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/98206168/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
--
diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml 
b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
index d0a15ad..b5c7c97 100644
--- a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
+++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
@@ -77,7 +77,7 @@
   
 
 
-  
+  
 
 



[1/3] hbase git commit: HBASE-20443 Use checkstyle to ban imports from commons-collections 3

2018-04-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-2 8c7293c51 -> 767cf4e60
  refs/heads/branch-2.0 0207ed03f -> 98206168d
  refs/heads/master 8219ec749 -> 90fe98ae9


HBASE-20443 Use checkstyle to ban imports from commons-collections 3

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/90fe98ae
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/90fe98ae
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/90fe98ae

Branch: refs/heads/master
Commit: 90fe98ae99dd13f2194bc6eea37606070d13f268
Parents: 8219ec7
Author: Balazs Meszaros 
Authored: Thu Apr 19 11:08:56 2018 +0200
Committer: Sean Busbey 
Committed: Fri Apr 20 12:06:58 2018 -0500

--
 hbase-checkstyle/src/main/resources/hbase/checkstyle.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/90fe98ae/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
--
diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml 
b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
index d0a15ad..b5c7c97 100644
--- a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
+++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
@@ -77,7 +77,7 @@
   
 
 
-  
+  
 
 

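As with the commons-lang rule, the module text is stripped here. The intent, illustrated with assumed package names only, is that commons-collections 4 is the accepted import and commons-collections 3 is banned:

  import java.util.Collections;

  // Would be flagged by the new check (commons-collections 3):
  //   import org.apache.commons.collections.CollectionUtils;
  // Accepted replacement (commons-collections 4):
  import org.apache.commons.collections4.CollectionUtils;

  public class CollectionsImportExample {
    public static void main(String[] args) {
      System.out.println(CollectionUtils.isEmpty(Collections.emptyList()));  // prints "true"
    }
  }
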


[2/4] hbase git commit: HBASE-20293 get_splits returns duplicate split points when region replication is on

2018-04-20 Thread busbey
HBASE-20293 get_splits returns duplicate split points when region replication 
is on

Signed-off-by: Ted Yu 
Signed-off-by: Huaxiang Sun 
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/59d9e0f4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/59d9e0f4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/59d9e0f4

Branch: refs/heads/branch-1
Commit: 59d9e0f407cbcb387bc850a56c919a8c12509160
Parents: af172e0
Author: Toshihiro Suzuki 
Authored: Wed Apr 18 14:47:04 2018 +0900
Committer: Sean Busbey 
Committed: Fri Apr 20 12:40:50 2018 -0500

--
 hbase-shell/src/main/ruby/hbase/table.rb  | 52 ++
 hbase-shell/src/test/ruby/hbase/table_test.rb | 18 +++-
 hbase-shell/src/test/ruby/test_helper.rb  | 11 +
 3 files changed, 61 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/59d9e0f4/hbase-shell/src/main/ruby/hbase/table.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb 
b/hbase-shell/src/main/ruby/hbase/table.rb
index 4109006..8e3f6e4 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -20,6 +20,8 @@
 include Java
 
 java_import org.apache.hadoop.hbase.util.Bytes
+java_import org.apache.hadoop.hbase.client.RegionReplicaUtil
+java_import org.apache.hadoop.hbase.client.Scan
 
 # Wrapper for org.apache.hadoop.hbase.client.Table
 
@@ -48,8 +50,9 @@ module Hbase
   method  = name.to_sym
   self.class_eval do
 define_method method do |*args|
-@shell.internal_command(shell_command, internal_method_name, self, 
*args)
- end
+  @shell.internal_command(shell_command, internal_method_name, self,
+  *args)
+end
   end
 end
 
@@ -143,7 +146,7 @@ EOF
   end
   #Case where attributes are specified without timestamp
   if timestamp.kind_of?(Hash)
-   timestamp.each do |k, v|
+timestamp.each do |k, v|
   if k == 'ATTRIBUTES'
 set_attributes(p, v)
   elsif k == 'VISIBILITY'
@@ -185,12 +188,12 @@ EOF
  timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP
   end
   d = org.apache.hadoop.hbase.client.Delete.new(row.to_s.to_java_bytes, 
timestamp)
-  if temptimestamp.kind_of?(Hash)
-   temptimestamp.each do |k, v|
- if v.kind_of?(String)
-   set_cell_visibility(d, v) if v
- end
-end
+  if temptimestamp.is_a?(Hash)
+temptimestamp.each do |_, v|
+  if v.is_a?(String)
+set_cell_visibility(d, v) if v
+  end
+end
   end
   if args.any?
  visibility = args[VISIBILITY]
@@ -262,9 +265,11 @@ EOF
 
 
#--
 # Count rows in a table
+
+# rubocop:disable Metrics/AbcSize
 def _count_internal(interval = 1000, caching_rows = 10)
   # We can safely set scanner caching with the first key only filter
-  scan = org.apache.hadoop.hbase.client.Scan.new
+  scan = Scan.new
   scan.setCacheBlocks(false)
   scan.setCaching(caching_rows)
   scan.setFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter.new)
@@ -288,6 +293,7 @@ EOF
   # Return the counter
   return count
 end
+# rubocop:enable Metrics/AbcSize
 
 
#--
 # Get from table
@@ -425,6 +431,8 @@ EOF
   org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValue)
 end
 
+# rubocop:disable Metrics/AbcSize
+# rubocop:disable Metrics/MethodLength
 def _hash_to_scan(args)
   if args.any?
 enablemetrics = args["ALL_METRICS"].nil? ? false : args["ALL_METRICS"]
@@ -453,10 +461,10 @@ EOF
 end
 
 scan = if stoprow
-  org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes, 
stoprow.to_java_bytes)
-else
-  org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes)
-end
+ Scan.new(startrow.to_java_bytes, stoprow.to_java_bytes)
+   else
+ Scan.new(startrow.to_java_bytes)
+   end
 
 # This will overwrite any startrow/stoprow settings
 scan.setRowPrefixFilter(rowprefixfilter.to_java_bytes) if 
rowprefixfilter
@@ -493,11 +501,13 @@ EOF
 set_authorizations(scan, authorizations) if authorizations
 
scan.setConsistency(org.apache.hadoop.hbase.client.Consistency.valueOf(consistency))
 if 

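The table.rb hunk above is truncated by the archive before the part of the fix that actually drops non-default replicas, so the de-duplication itself is not visible here. A minimal Java sketch of the same idea, assuming the client-side RegionLocator and RegionReplicaUtil APIs that the Ruby code imports (this is a sketch, not the committed Ruby change):

  import java.io.IOException;
  import java.util.ArrayList;
  import java.util.List;

  import org.apache.hadoop.hbase.HRegionLocation;
  import org.apache.hadoop.hbase.client.RegionLocator;
  import org.apache.hadoop.hbase.client.RegionReplicaUtil;
  import org.apache.hadoop.hbase.util.Bytes;

  public class SplitPointsSketch {
    // Start keys of primary regions only; secondary replicas repeat the same
    // start keys, which is what produced the duplicate split points.
    public static List<String> startKeys(RegionLocator locator) throws IOException {
      List<String> splits = new ArrayList<>();
      for (HRegionLocation loc : locator.getAllRegionLocations()) {
        if (!RegionReplicaUtil.isDefaultReplica(loc.getRegionInfo())) {
          continue;
        }
        String startKey = Bytes.toStringBinary(loc.getRegionInfo().getStartKey());
        if (!startKey.isEmpty()) {
          splits.add(startKey);
        }
      }
      return splits;
    }
  }
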
[1/4] hbase git commit: HBASE-20463 Ruby 1.8 syntax fixes.

2018-04-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1 8a244e51d -> 59d9e0f40
  refs/heads/branch-1.4 e075492b4 -> 1be417a99


HBASE-20463 Ruby 1.8 syntax fixes.

branches-1 specific addendum to HBASE-20276

Signed-off-by: Huaxiang Sun 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/af172e06
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/af172e06
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/af172e06

Branch: refs/heads/branch-1
Commit: af172e06040c15ccd24b51ecd2ce54ee63c684bc
Parents: 8a244e5
Author: Sean Busbey 
Authored: Wed Apr 18 16:33:03 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 12:38:28 2018 -0500

--
 hbase-shell/src/main/ruby/hbase/table.rb   | 13 +
 hbase-shell/src/main/ruby/shell/commands/get_splits.rb |  6 --
 2 files changed, 13 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/af172e06/hbase-shell/src/main/ruby/hbase/table.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb 
b/hbase-shell/src/main/ruby/hbase/table.rb
index 3e3fb8e..4109006 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -718,14 +718,19 @@ EOF
 
 
#--
 # Get the split points for the table
+# Disable multiline block chain because we need it to stay under width
+# in ruby 1.8.
+# rubocop:disable Style/MultilineBlockChain
 def _get_splits_internal()
-  locator = @table.getRegionLocator()
-  locator.getAllRegionLocations()
- .map { |i| Bytes.toStringBinary(i.getRegionInfo().getStartKey) }
- .delete_if { |k| k == "" }
+  locator = @table.getRegionLocator
+  locator.getAllRegionLocations.map do |i|
+Bytes.toStringBinary(i.getRegionInfo.getStartKey)
+  end.delete_if { |k| k == '' }
 ensure
   locator.close()
 end
   end
+  # rubocop:enable Style/MultilineBlockChain
+
   # rubocop:enable Metrics/ClassLength
 end

http://git-wip-us.apache.org/repos/asf/hbase/blob/af172e06/hbase-shell/src/main/ruby/shell/commands/get_splits.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/get_splits.rb 
b/hbase-shell/src/main/ruby/shell/commands/get_splits.rb
index 26be15f..c090427 100644
--- a/hbase-shell/src/main/ruby/shell/commands/get_splits.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/get_splits.rb
@@ -36,12 +36,14 @@ EOF
 get_splits(table(table))
   end
 
+  # Disable format string because it doesn't work in ruby 1.8
+  # rubocop:disable Style/FormatStringToken
   def get_splits(table)
 splits = table._get_splits_internal()
-puts(format('Total number of splits = %d',
-numsplits: (splits.size + 1)))
+puts(format('Total number of splits = %d', (splits.size + 1)))
 splits
   end
+  # rubocop:enable Style/FormatStringToken
 end
   end
 end



[4/4] hbase git commit: HBASE-20293 get_splits returns duplicate split points when region replication is on

2018-04-20 Thread busbey
HBASE-20293 get_splits returns duplicate split points when region replication 
is on

Signed-off-by: Ted Yu 
Signed-off-by: Huaxiang Sun 
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1be417a9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1be417a9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1be417a9

Branch: refs/heads/branch-1.4
Commit: 1be417a99d01e839355fab495d61d2db8ce7f4cd
Parents: ff0c141
Author: Toshihiro Suzuki 
Authored: Wed Apr 18 14:47:04 2018 +0900
Committer: Sean Busbey 
Committed: Fri Apr 20 12:46:44 2018 -0500

--
 hbase-shell/src/main/ruby/hbase/table.rb  | 52 ++
 hbase-shell/src/test/ruby/hbase/table_test.rb | 18 +++-
 hbase-shell/src/test/ruby/test_helper.rb  | 11 +
 3 files changed, 61 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1be417a9/hbase-shell/src/main/ruby/hbase/table.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb 
b/hbase-shell/src/main/ruby/hbase/table.rb
index 4109006..8e3f6e4 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -20,6 +20,8 @@
 include Java
 
 java_import org.apache.hadoop.hbase.util.Bytes
+java_import org.apache.hadoop.hbase.client.RegionReplicaUtil
+java_import org.apache.hadoop.hbase.client.Scan
 
 # Wrapper for org.apache.hadoop.hbase.client.Table
 
@@ -48,8 +50,9 @@ module Hbase
   method  = name.to_sym
   self.class_eval do
 define_method method do |*args|
-@shell.internal_command(shell_command, internal_method_name, self, 
*args)
- end
+  @shell.internal_command(shell_command, internal_method_name, self,
+  *args)
+end
   end
 end
 
@@ -143,7 +146,7 @@ EOF
   end
   #Case where attributes are specified without timestamp
   if timestamp.kind_of?(Hash)
-   timestamp.each do |k, v|
+timestamp.each do |k, v|
   if k == 'ATTRIBUTES'
 set_attributes(p, v)
   elsif k == 'VISIBILITY'
@@ -185,12 +188,12 @@ EOF
  timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP
   end
   d = org.apache.hadoop.hbase.client.Delete.new(row.to_s.to_java_bytes, 
timestamp)
-  if temptimestamp.kind_of?(Hash)
-   temptimestamp.each do |k, v|
- if v.kind_of?(String)
-   set_cell_visibility(d, v) if v
- end
-end
+  if temptimestamp.is_a?(Hash)
+temptimestamp.each do |_, v|
+  if v.is_a?(String)
+set_cell_visibility(d, v) if v
+  end
+end
   end
   if args.any?
  visibility = args[VISIBILITY]
@@ -262,9 +265,11 @@ EOF
 
 
#--
 # Count rows in a table
+
+# rubocop:disable Metrics/AbcSize
 def _count_internal(interval = 1000, caching_rows = 10)
   # We can safely set scanner caching with the first key only filter
-  scan = org.apache.hadoop.hbase.client.Scan.new
+  scan = Scan.new
   scan.setCacheBlocks(false)
   scan.setCaching(caching_rows)
   scan.setFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter.new)
@@ -288,6 +293,7 @@ EOF
   # Return the counter
   return count
 end
+# rubocop:enable Metrics/AbcSize
 
 
#--
 # Get from table
@@ -425,6 +431,8 @@ EOF
   org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValue)
 end
 
+# rubocop:disable Metrics/AbcSize
+# rubocop:disable Metrics/MethodLength
 def _hash_to_scan(args)
   if args.any?
 enablemetrics = args["ALL_METRICS"].nil? ? false : args["ALL_METRICS"]
@@ -453,10 +461,10 @@ EOF
 end
 
 scan = if stoprow
-  org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes, 
stoprow.to_java_bytes)
-else
-  org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes)
-end
+ Scan.new(startrow.to_java_bytes, stoprow.to_java_bytes)
+   else
+ Scan.new(startrow.to_java_bytes)
+   end
 
 # This will overwrite any startrow/stoprow settings
 scan.setRowPrefixFilter(rowprefixfilter.to_java_bytes) if 
rowprefixfilter
@@ -493,11 +501,13 @@ EOF
 set_authorizations(scan, authorizations) if authorizations
 
scan.setConsistency(org.apache.hadoop.hbase.client.Consistency.valueOf(consistency))
 if 

[3/4] hbase git commit: HBASE-20463 Ruby 1.8 syntax fixes.

2018-04-20 Thread busbey
HBASE-20463 Ruby 1.8 syntax fixes.

branches-1 specific addendum to HBASE-20276

Signed-off-by: Huaxiang Sun 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ff0c1418
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ff0c1418
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ff0c1418

Branch: refs/heads/branch-1.4
Commit: ff0c14182c19967305a6894fa21988709ef41ff6
Parents: e075492
Author: Sean Busbey 
Authored: Wed Apr 18 16:33:03 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 12:46:37 2018 -0500

--
 hbase-shell/src/main/ruby/hbase/table.rb   | 13 +
 hbase-shell/src/main/ruby/shell/commands/get_splits.rb |  6 --
 2 files changed, 13 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ff0c1418/hbase-shell/src/main/ruby/hbase/table.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb 
b/hbase-shell/src/main/ruby/hbase/table.rb
index 3e3fb8e..4109006 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -718,14 +718,19 @@ EOF
 
 
#--
 # Get the split points for the table
+# Disable multiline block chain because we need it to stay under width
+# in ruby 1.8.
+# rubocop:disable Style/MultilineBlockChain
 def _get_splits_internal()
-  locator = @table.getRegionLocator()
-  locator.getAllRegionLocations()
- .map { |i| Bytes.toStringBinary(i.getRegionInfo().getStartKey) }
- .delete_if { |k| k == "" }
+  locator = @table.getRegionLocator
+  locator.getAllRegionLocations.map do |i|
+Bytes.toStringBinary(i.getRegionInfo.getStartKey)
+  end.delete_if { |k| k == '' }
 ensure
   locator.close()
 end
   end
+  # rubocop:enable Style/MultilineBlockChain
+
   # rubocop:enable Metrics/ClassLength
 end

http://git-wip-us.apache.org/repos/asf/hbase/blob/ff0c1418/hbase-shell/src/main/ruby/shell/commands/get_splits.rb
--
diff --git a/hbase-shell/src/main/ruby/shell/commands/get_splits.rb 
b/hbase-shell/src/main/ruby/shell/commands/get_splits.rb
index 26be15f..c090427 100644
--- a/hbase-shell/src/main/ruby/shell/commands/get_splits.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/get_splits.rb
@@ -36,12 +36,14 @@ EOF
 get_splits(table(table))
   end
 
+  # Disable format string because it doesn't work in ruby 1.8
+  # rubocop:disable Style/FormatStringToken
   def get_splits(table)
 splits = table._get_splits_internal()
-puts(format('Total number of splits = %d',
-numsplits: (splits.size + 1)))
+puts(format('Total number of splits = %d', (splits.size + 1)))
 splits
   end
+  # rubocop:enable Style/FormatStringToken
 end
   end
 end



[hbase] Git Push Summary

2018-04-20 Thread stack
Repository: hbase
Updated Tags:  refs/tags/2.0.0RC1 [created] c69837e8d


svn commit: r26442 [1/3] - in /dev/hbase: hbase-2.0.0RC0/ hbase-2.0.0RC1/

2018-04-20 Thread stack
Author: stack
Date: Fri Apr 20 17:52:05 2018
New Revision: 26442

Log:
Remove 2.0.0RC0 and push 2.0.0RC1

Added:
dev/hbase/hbase-2.0.0RC1/
dev/hbase/hbase-2.0.0RC1/compatibiliity_report_1.2.6vs2.0.0RC1.html
dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-bin.tar.gz   (with props)
dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-bin.tar.gz.asc
dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-bin.tar.gz.sha512
dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-src.tar.gz   (with props)
dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-src.tar.gz.asc
dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-src.tar.gz.sha512
Removed:
dev/hbase/hbase-2.0.0RC0/



svn commit: r26442 [3/3] - in /dev/hbase: hbase-2.0.0RC0/ hbase-2.0.0RC1/

2018-04-20 Thread stack
Added: dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-bin.tar.gz
==
Binary file - no diff available.

Propchange: dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-bin.tar.gz
--
svn:mime-type = application/octet-stream

Added: dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-bin.tar.gz.asc
==
--- dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-bin.tar.gz.asc (added)
+++ dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-bin.tar.gz.asc Fri Apr 20 17:52:05 2018
@@ -0,0 +1,16 @@
+-BEGIN PGP SIGNATURE-
+
+iQIcBAABCAAGBQJa2iG7AAoJEJgWx/yKzJPSkOsP/R2GfLNbxVlOpI5btTks/Sag
+ee7a/2maFsHQyr00y5IVfJEWFsCVNhwWc4rSzBiXb3ktLYWJBAo6C27GZZ8zl2Lx
+KEZz6Nlqiy1Q0Biw/odrFXNnbuRKKGr8F8lGM09QmB0Yt6loxcHhN325ys/UBu+Y
+PQ9lFOWFn+sm8TajscPHNDJLQStKzZlfv1fb8xfDCXc+3ITAWMLxtcSETuzym/lq
+XdX/YeP4/2ZBmtlKh0pxaPRMv5MEULp5l6PM+BuT87yP6zKHZKTti9MScggV1p4z
+aj2pTLZ269q5DoDhwawnvMpqNhhoLF2yNS7/3q9px5U44IwUUhEnio9e1pOx1IbK
+s0/AtBqWy+o+DQMLm8C68qH7k0Y+e5COj6f/FRNATKELJARfC7WkBwwz3Xb3sT9Z
+f9cD6P3u3jGqc+HMIjV6ZvSTJXztoylhuqHG+BQQkMsXqNTj8oEDCx/EmzzbyOmX
+haPUburi9BA8ElmKOdPjH6eFQ0txiUsd1c1hGfsE6+A7nstnLhIwsPm8fjLUq+wm
+W/0QwAkfUKvHmdfji74ze1wayqBtOPMHDDHeKLaoI21tDlFzKx4TGqHxLlSs6awg
+gnzzm4iadRgcWukgB87O4RDeLF8B87ZIpU7OtRRfs6TMPsg2J9p8WfBLLbB9Z7J3
+finCDMfuxCRq02iyjlaW
+=K/wc
+-END PGP SIGNATURE-

Added: dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-bin.tar.gz.sha512
==
--- dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-bin.tar.gz.sha512 (added)
+++ dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-bin.tar.gz.sha512 Fri Apr 20 17:52:05 
2018
@@ -0,0 +1,3 @@
+hbase-2.0.0-bin.tar.gz: 5B730CE9 BBFFDE6A D5D646FF 67093F1F 9E2E2928 325CE17A
+EFFA3C54 F0F669F3 1C463F4A CCEB8372 F68F024A 2341413C
+A6752BC7 343219B1 D2E40837 122C6051

Added: dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-src.tar.gz
==
Binary file - no diff available.

Propchange: dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-src.tar.gz
--
svn:mime-type = application/octet-stream

Added: dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-src.tar.gz.asc
==
--- dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-src.tar.gz.asc (added)
+++ dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-src.tar.gz.asc Fri Apr 20 17:52:05 2018
@@ -0,0 +1,16 @@
+-BEGIN PGP SIGNATURE-
+
+iQIcBAABCAAGBQJa2iG8AAoJEJgWx/yKzJPS0DUP/1jahz4j2UwNRUMKNUZrEaGY
+WuPb+dCG4VQ3y/SB5xB24AX1q45Lk7D4gDPFiy13lpuVC9zzfmPlIlScR3rRRtWl
+b5BfTuYk+WJUIoSSsBmnMq+I3TG2EBGugm+6M8CWYOb+r1f7Y1tTum6FvCpMdQ0k
+hoLtxyXYAMk4De1qYliw0a+GMH1EYG+sIghafUVZpm65bXocWeJL4FfnbURX/blG
+OUyZUGktEeQE7cNtyrT3NW6hofCaQo4am/ePHcObs5JL9NvISwS/3z2hicD+Fqnz
+HiUq7pGAAYh455xX7xMG0UN/V4GWgmL1NgFJzCW9oXDzPG+ys9zObEqH0hIiriT/
++1ca9B9q0h6k2YXrGd8NZ0dGlsJyxI/Znss5r7t6VhHhmkLPsiWI35G7Q1/Us1O3
+wnXTVTz0z1GiMWgUYgszhWHm+PDhn/HRkvH6Nyj2xIUvRGXJG8ntjAJ7QVvF91P1
+Nfvl7Giy1vsV6RtdN/86AhFs4bO88uc3s/no6mq+Y/I2/wPdvki+oOoMoBWTWAOY
+pRKnbKp19uRQOvm+udyGPCQzEsv1vAOPkZwsDYT9AtqooQx4h1EyVNSb8ssdRVXz
+R8vDU/04KT2xqps6KxGH4KPSMF+98I8369lUPBHVaal65OssbTGOI5CZVjjOeeT8
+sxysUCIwJCYtvI1KxlGp
+=WGDI
+-END PGP SIGNATURE-

Added: dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-src.tar.gz.sha512
==
--- dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-src.tar.gz.sha512 (added)
+++ dev/hbase/hbase-2.0.0RC1/hbase-2.0.0-src.tar.gz.sha512 Fri Apr 20 17:52:05 
2018
@@ -0,0 +1,3 @@
+hbase-2.0.0-src.tar.gz: D0B915DA 9D73CB72 76A55F62 3C378180 567064A7 DEC703B2
+4890A221 FCAC8634 A2D5D9EB 666CEF46 2DB43C07 A9F74EF0
+1F17111B 5EAEBA38 EBF8A475 C23FC15D

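For anyone checking the staged artifacts, the published .sha512 files above use whitespace-grouped upper-case hex. A small self-contained sketch (plain JDK, no HBase code) that prints a digest to compare against them after stripping the grouping:

  import java.io.InputStream;
  import java.nio.file.Files;
  import java.nio.file.Paths;
  import java.security.MessageDigest;

  public class Sha512Check {
    public static void main(String[] args) throws Exception {
      // args[0]: path to a downloaded tarball, e.g. hbase-2.0.0-bin.tar.gz
      MessageDigest digest = MessageDigest.getInstance("SHA-512");
      try (InputStream in = Files.newInputStream(Paths.get(args[0]))) {
        byte[] buf = new byte[8192];
        int read;
        while ((read = in.read(buf)) != -1) {
          digest.update(buf, 0, read);
        }
      }
      StringBuilder hex = new StringBuilder();
      for (byte b : digest.digest()) {
        hex.append(String.format("%02X", b));
      }
      // Remove whitespace from the .sha512 file's value before comparing with this output.
      System.out.println(hex);
    }
  }
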



svn commit: r26442 [2/3] - in /dev/hbase: hbase-2.0.0RC0/ hbase-2.0.0RC1/

2018-04-20 Thread stack

Added: dev/hbase/hbase-2.0.0RC1/compatibiliity_report_1.2.6vs2.0.0RC1.html
==
--- dev/hbase/hbase-2.0.0RC1/compatibiliity_report_1.2.6vs2.0.0RC1.html (added)
+++ dev/hbase/hbase-2.0.0RC1/compatibiliity_report_1.2.6vs2.0.0RC1.html Fri Apr 
20 17:52:05 2018
@@ -0,0 +1,12741 @@
+[hbase: rel/1.2.6 to 2.0.0RC1 compatibility report. The rest of this 12,741-line HTML file (markup and embedded stylesheet) is mangled and truncated in the archive, so it is omitted here.]

[2/2] hbase git commit: HBASE-20438 Add an HBase antipattern check for reintroducing commons-logging

2018-04-20 Thread busbey
HBASE-20438 Add an HBase antipattern check for reintroducing commons-logging

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0207ed03
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0207ed03
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0207ed03

Branch: refs/heads/branch-2.0
Commit: 0207ed03f3bdf4a800c575f9414d1abe7062a49a
Parents: d8985e0
Author: Nihal Jain 
Authored: Wed Apr 18 23:03:07 2018 +0530
Committer: Sean Busbey 
Committed: Fri Apr 20 11:59:03 2018 -0500

--
 dev-support/hbase-personality.sh | 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0207ed03/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index e047a5a..c616be2 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -642,6 +642,12 @@ function hbaseanti_patchfile
 ((result=result+1))
   fi
 
+  warnings=$(${GREP} -cE 'org.apache.commons.logging.Log(Factory|;)' 
"${patchfile}")
+  if [[ ${warnings} -gt 0 ]]; then
+add_vote_table -1 hbaseanti "" "The patch appears to use commons-logging 
instead of slf4j."
+((result=result+1))
+  fi
+
   if [[ ${result} -gt 0 ]]; then
 return 1
   fi

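The grep above only flags offending imports. As a quick illustration of the accepted pattern (standard slf4j API, not part of the patch), new code declares its logger through slf4j rather than commons-logging's LogFactory:

  // These imports would trip the new hbaseanti check:
  //   import org.apache.commons.logging.Log;
  //   import org.apache.commons.logging.LogFactory;
  // The slf4j equivalent passes it:
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class LoggingExample {
    private static final Logger LOG = LoggerFactory.getLogger(LoggingExample.class);

    public static void main(String[] args) {
      LOG.info("logging through slf4j; commons-logging imports would be flagged");
    }
  }
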


[1/2] hbase git commit: HBASE-20438 Add an HBase antipattern check for reintroducing commons-logging

2018-04-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-2 22e7ae031 -> 8c7293c51
  refs/heads/branch-2.0 d8985e04b -> 0207ed03f


HBASE-20438 Add an HBase antipattern check for reintroducing commons-logging

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8c7293c5
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8c7293c5
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8c7293c5

Branch: refs/heads/branch-2
Commit: 8c7293c5147f788a7e7fa150868df4d15f0e0f55
Parents: 22e7ae0
Author: Nihal Jain 
Authored: Wed Apr 18 23:03:07 2018 +0530
Committer: Sean Busbey 
Committed: Fri Apr 20 11:58:39 2018 -0500

--
 dev-support/hbase-personality.sh | 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8c7293c5/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 2198913..b010503 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -642,6 +642,12 @@ function hbaseanti_patchfile
 ((result=result+1))
   fi
 
+  warnings=$(${GREP} -cE 'org.apache.commons.logging.Log(Factory|;)' 
"${patchfile}")
+  if [[ ${warnings} -gt 0 ]]; then
+add_vote_table -1 hbaseanti "" "The patch appears to use commons-logging 
instead of slf4j."
+((result=result+1))
+  fi
+
   if [[ ${result} -gt 0 ]]; then
 return 1
   fi



hbase git commit: HBASE-20438 Add an HBase antipattern check for reintroducing commons-logging

2018-04-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/master 4e183748c -> 8219ec749


HBASE-20438 Add an HBase antipattern check for reintroducing commons-logging

Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8219ec74
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8219ec74
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8219ec74

Branch: refs/heads/master
Commit: 8219ec749331eb163060f5ffbeedd8ae826ebfe1
Parents: 4e18374
Author: Nihal Jain 
Authored: Wed Apr 18 23:03:07 2018 +0530
Committer: Sean Busbey 
Committed: Fri Apr 20 11:54:01 2018 -0500

--
 dev-support/hbase-personality.sh | 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8219ec74/dev-support/hbase-personality.sh
--
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 2198913..b010503 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -642,6 +642,12 @@ function hbaseanti_patchfile
 ((result=result+1))
   fi
 
+  warnings=$(${GREP} -cE 'org.apache.commons.logging.Log(Factory|;)' 
"${patchfile}")
+  if [[ ${warnings} -gt 0 ]]; then
+add_vote_table -1 hbaseanti "" "The patch appears to use commons-logging 
instead of slf4j."
+((result=result+1))
+  fi
+
   if [[ ${result} -gt 0 ]]; then
 return 1
   fi



[3/3] hbase git commit: HBASE-20006 TestRestoreSnapshotFromClientWithRegionReplicas is flakey

2018-04-20 Thread busbey
HBASE-20006 TestRestoreSnapshotFromClientWithRegionReplicas is flakey

Signed-off-by: Ted Yu 
Signed-off-by: Sean Busbey 

 Conflicts:

hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRestoreSnapshotFromClientWithRegionReplicas.java

hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8a244e51
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8a244e51
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8a244e51

Branch: refs/heads/branch-1
Commit: 8a244e51d7c9e54c81c21930046d1758f306dfa0
Parents: 03eb3d2
Author: Toshihiro Suzuki 
Authored: Sun Mar 4 14:30:07 2018 +0900
Committer: Sean Busbey 
Committed: Fri Apr 20 11:34:45 2018 -0500

--
 .../hbase/regionserver/StoreFileInfo.java   | 37 ++--
 .../hbase/util/ServerRegionReplicaUtil.java | 27 ++
 .../regionserver/TestHRegionReplayEvents.java   | 10 +++---
 3 files changed, 51 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8a244e51/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
index 12da6b7..76551e2 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
@@ -145,13 +145,12 @@ public class StoreFileInfo {
 
   /**
* Create a Store File Info from an HFileLink
-   * @param conf the {@link Configuration} to use
-   * @param fs The current file system to use.
+   * @param conf The {@link Configuration} to use
+   * @param fs The current file system to use
* @param fileStatus The {@link FileStatus} of the file
*/
   public StoreFileInfo(final Configuration conf, final FileSystem fs, final 
FileStatus fileStatus,
-  final HFileLink link)
-  throws IOException {
+  final HFileLink link) {
 this.fs = fs;
 this.conf = conf;
 // initialPath can be null only if we get a link.
@@ -163,15 +162,13 @@ public class StoreFileInfo {
 
   /**
* Create a Store File Info from an HFileLink
-   * @param conf
-   * @param fs
-   * @param fileStatus
-   * @param reference
-   * @throws IOException
+   * @param conf The {@link Configuration} to use
+   * @param fs The current file system to use
+   * @param fileStatus The {@link FileStatus} of the file
+   * @param reference The reference instance
*/
   public StoreFileInfo(final Configuration conf, final FileSystem fs, final 
FileStatus fileStatus,
-  final Reference reference)
-  throws IOException {
+  final Reference reference) {
 this.fs = fs;
 this.conf = conf;
 this.initialPath = fileStatus.getPath();
@@ -181,6 +178,24 @@ public class StoreFileInfo {
   }
 
   /**
+   * Create a Store File Info from an HFileLink and a Reference
+   * @param conf The {@link Configuration} to use
+   * @param fs The current file system to use
+   * @param fileStatus The {@link FileStatus} of the file
+   * @param reference The reference instance
+   * @param link The link instance
+   */
+  public StoreFileInfo(final Configuration conf, final FileSystem fs, final 
FileStatus fileStatus,
+  final Reference reference, final HFileLink link) {
+this.fs = fs;
+this.conf = conf;
+this.initialPath = fileStatus.getPath();
+this.createdTimestamp = fileStatus.getModificationTime();
+this.reference = reference;
+this.link = link;
+  }
+
+  /**
* Sets the region coprocessor env.
* @param coprocessorHost
*/

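The new five-argument constructor shown above covers store files that are both a Reference and reachable through an HFileLink, the combination that tripped the flaky restore-from-snapshot test. A hedged usage sketch against the local filesystem; the table, region and file names are placeholders for illustration, not values from the commit:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileStatus;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.io.HFileLink;
  import org.apache.hadoop.hbase.io.Reference;
  import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
  import org.apache.hadoop.hbase.util.Bytes;

  public class StoreFileInfoSketch {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      FileSystem fs = FileSystem.getLocal(conf);
      Path file = new Path(System.getProperty("java.io.tmpdir"), "example-storefile");
      fs.createNewFile(file);
      FileStatus status = fs.getFileStatus(file);

      // A top-half reference, as a region split would produce.
      Reference reference = Reference.createTopReference(Bytes.toBytes("splitrow"));
      // A link built the same way the ServerRegionReplicaUtil code builds one
      // (placeholder table, encoded region name and hfile name).
      HFileLink link = HFileLink.build(conf, TableName.valueOf("demo"),
          "0123456789abcdef0123456789abcdef", "cf", "abcdef0123456789abcdef0123456789");

      // Before this change only one of (reference, link) could be handed over.
      StoreFileInfo info = new StoreFileInfo(conf, fs, status, reference, link);
      System.out.println(info.isReference());  // prints "true"
    }
  }
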
http://git-wip-us.apache.org/repos/asf/hbase/blob/8a244e51/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
index 2ba1b47..2f21712 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
@@ -124,15 +124,28 @@ public class ServerRegionReplicaUtil extends 
RegionReplicaUtil {
 }
 
 // else create a store file link. The link file does not exists on 
filesystem though.
-HFileLink link = HFileLink.build(conf, 

[2/3] hbase git commit: HBASE-20006 TestRestoreSnapshotFromClientWithRegionReplicas is flakey

2018-04-20 Thread busbey
HBASE-20006 TestRestoreSnapshotFromClientWithRegionReplicas is flakey

Signed-off-by: Ted Yu 
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/22e7ae03
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/22e7ae03
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/22e7ae03

Branch: refs/heads/branch-2
Commit: 22e7ae0311efe8285b29faa3d819ab0fd5920ce4
Parents: 24eb141
Author: Toshihiro Suzuki 
Authored: Sun Mar 4 14:30:07 2018 +0900
Committer: Sean Busbey 
Committed: Fri Apr 20 11:22:49 2018 -0500

--
 .../hbase/regionserver/StoreFileInfo.java   | 37 ++--
 .../hbase/util/ServerRegionReplicaUtil.java | 27 ++
 ...oreSnapshotFromClientWithRegionReplicas.java |  2 --
 .../regionserver/TestHRegionReplayEvents.java   | 10 +++---
 4 files changed, 51 insertions(+), 25 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/22e7ae03/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
index a1fe2d1..779ca44 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
@@ -157,13 +157,12 @@ public class StoreFileInfo {
 
   /**
* Create a Store File Info from an HFileLink
-   * @param conf the {@link Configuration} to use
-   * @param fs The current file system to use.
+   * @param conf The {@link Configuration} to use
+   * @param fs The current file system to use
* @param fileStatus The {@link FileStatus} of the file
*/
   public StoreFileInfo(final Configuration conf, final FileSystem fs, final 
FileStatus fileStatus,
-  final HFileLink link)
-  throws IOException {
+  final HFileLink link) {
 this.fs = fs;
 this.conf = conf;
 // initialPath can be null only if we get a link.
@@ -175,15 +174,13 @@ public class StoreFileInfo {
 
   /**
* Create a Store File Info from an HFileLink
-   * @param conf
-   * @param fs
-   * @param fileStatus
-   * @param reference
-   * @throws IOException
+   * @param conf The {@link Configuration} to use
+   * @param fs The current file system to use
+   * @param fileStatus The {@link FileStatus} of the file
+   * @param reference The reference instance
*/
   public StoreFileInfo(final Configuration conf, final FileSystem fs, final 
FileStatus fileStatus,
-  final Reference reference)
-  throws IOException {
+  final Reference reference) {
 this.fs = fs;
 this.conf = conf;
 this.initialPath = fileStatus.getPath();
@@ -193,6 +190,24 @@ public class StoreFileInfo {
   }
 
   /**
+   * Create a Store File Info from an HFileLink and a Reference
+   * @param conf The {@link Configuration} to use
+   * @param fs The current file system to use
+   * @param fileStatus The {@link FileStatus} of the file
+   * @param reference The reference instance
+   * @param link The link instance
+   */
+  public StoreFileInfo(final Configuration conf, final FileSystem fs, final 
FileStatus fileStatus,
+  final Reference reference, final HFileLink link) {
+this.fs = fs;
+this.conf = conf;
+this.initialPath = fileStatus.getPath();
+this.createdTimestamp = fileStatus.getModificationTime();
+this.reference = reference;
+this.link = link;
+  }
+
+  /**
* Sets the region coprocessor env.
* @param coprocessorHost
*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/22e7ae03/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
index 769d480..0609733 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
@@ -128,15 +128,28 @@ public class ServerRegionReplicaUtil extends 
RegionReplicaUtil {
 }
 
 // else create a store file link. The link file does not exists on 
filesystem though.
-HFileLink link = HFileLink.build(conf, regionInfoForFs.getTable(),
-regionInfoForFs.getEncodedName(), familyName, path.getName());
-
-if (StoreFileInfo.isReference(path)) {
+if (HFileLink.isHFileLink(path) || 

[1/3] hbase git commit: HBASE-20006 TestRestoreSnapshotFromClientWithRegionReplicas is flakey

2018-04-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1 03eb3d24b -> 8a244e51d
  refs/heads/branch-2 24eb141ba -> 22e7ae031
  refs/heads/master 914de1141 -> 4e183748c


HBASE-20006 TestRestoreSnapshotFromClientWithRegionReplicas is flakey

Signed-off-by: Ted Yu 
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4e183748
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4e183748
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4e183748

Branch: refs/heads/master
Commit: 4e183748c7900da905fb3549634df614b471b3cb
Parents: 914de11
Author: Toshihiro Suzuki 
Authored: Sun Mar 4 14:30:07 2018 +0900
Committer: Sean Busbey 
Committed: Fri Apr 20 10:51:11 2018 -0500

--
 .../hbase/regionserver/StoreFileInfo.java   | 37 ++--
 .../hbase/util/ServerRegionReplicaUtil.java | 27 ++
 ...oreSnapshotFromClientWithRegionReplicas.java |  2 --
 .../regionserver/TestHRegionReplayEvents.java   | 10 +++---
 4 files changed, 51 insertions(+), 25 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4e183748/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
index e7419d5..c9f5f8d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
@@ -157,13 +157,12 @@ public class StoreFileInfo {
 
   /**
* Create a Store File Info from an HFileLink
-   * @param conf the {@link Configuration} to use
-   * @param fs The current file system to use.
+   * @param conf The {@link Configuration} to use
+   * @param fs The current file system to use
* @param fileStatus The {@link FileStatus} of the file
*/
   public StoreFileInfo(final Configuration conf, final FileSystem fs, final 
FileStatus fileStatus,
-  final HFileLink link)
-  throws IOException {
+  final HFileLink link) {
 this.fs = fs;
 this.conf = conf;
 // initialPath can be null only if we get a link.
@@ -175,15 +174,13 @@ public class StoreFileInfo {
 
   /**
* Create a Store File Info from an HFileLink
-   * @param conf
-   * @param fs
-   * @param fileStatus
-   * @param reference
-   * @throws IOException
+   * @param conf The {@link Configuration} to use
+   * @param fs The current file system to use
+   * @param fileStatus The {@link FileStatus} of the file
+   * @param reference The reference instance
*/
   public StoreFileInfo(final Configuration conf, final FileSystem fs, final 
FileStatus fileStatus,
-  final Reference reference)
-  throws IOException {
+  final Reference reference) {
 this.fs = fs;
 this.conf = conf;
 this.initialPath = fileStatus.getPath();
@@ -193,6 +190,24 @@ public class StoreFileInfo {
   }
 
   /**
+   * Create a Store File Info from an HFileLink and a Reference
+   * @param conf The {@link Configuration} to use
+   * @param fs The current file system to use
+   * @param fileStatus The {@link FileStatus} of the file
+   * @param reference The reference instance
+   * @param link The link instance
+   */
+  public StoreFileInfo(final Configuration conf, final FileSystem fs, final 
FileStatus fileStatus,
+  final Reference reference, final HFileLink link) {
+this.fs = fs;
+this.conf = conf;
+this.initialPath = fileStatus.getPath();
+this.createdTimestamp = fileStatus.getModificationTime();
+this.reference = reference;
+this.link = link;
+  }
+
+  /**
* Sets the region coprocessor env.
* @param coprocessorHost
*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/4e183748/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
index 769d480..0609733 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerRegionReplicaUtil.java
@@ -128,15 +128,28 @@ public class ServerRegionReplicaUtil extends 
RegionReplicaUtil {
 }
 
 // else create a store file link. The link file does not exists on 
filesystem though.
-HFileLink link = HFileLink.build(conf, 

hbase git commit: HBASE-20462 Put up 2.0.0RC1; Update CHANGES.md and RELEASENOTES.md; ADDENDUM; add in last few commits.

2018-04-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 add977be5 -> d8985e04b


HBASE-20462 Put up 2.0.0RC1; Update CHANGES.md and RELEASENOTES.md;
ADDENDUM; add in last few commits.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d8985e04
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d8985e04
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d8985e04

Branch: refs/heads/branch-2.0
Commit: d8985e04bf6adfc3d00660c3e04e27a5f5bacb4b
Parents: add977b
Author: Michael Stack 
Authored: Fri Apr 20 09:05:33 2018 -0700
Committer: Michael Stack 
Committed: Fri Apr 20 09:05:33 2018 -0700

--
 CHANGES.md  | 4 
 RELEASENOTES.md | 6 +-
 2 files changed, 9 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d8985e04/CHANGES.md
--
diff --git a/CHANGES.md b/CHANGES.md
index 9294c7a..9b3fed7 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1104,6 +1104,10 @@ comment to preserve continuity of the CHANGELOG.
 
 | JIRA | Summary | Priority | Component |
 |: |: | :--- |: |
+| [HBASE-20464](https://issues.apache.org/jira/browse/HBASE-20464) | Disable 
IMC |  Major | in-memory-compaction |
+| [HBASE-20442](https://issues.apache.org/jira/browse/HBASE-20442) | clean up 
incorrect use of commons-collections 3 |  Major | dependencies, thirdparty |
+| [HBASE-20440](https://issues.apache.org/jira/browse/HBASE-20440) | Clean up 
incorrect use of commons-lang 2.y |  Major | dependencies |
+| [HBASE-20439](https://issues.apache.org/jira/browse/HBASE-20439) | Clean up 
incorrect use of commons-logging in hbase-server |  Minor | dependencies, 
logging |
 | [HBASE-20398](https://issues.apache.org/jira/browse/HBASE-20398) | Redirect 
doesn't work on web UI |  Major | UI |
 | [HBASE-20399](https://issues.apache.org/jira/browse/HBASE-20399) | Fix merge 
layout |  Minor | UI |
 | [HBASE-20233](https://issues.apache.org/jira/browse/HBASE-20233) | [metrics] 
Ill-formatted numRegions metric in 
"Hadoop:service=HBase,name=RegionServer,sub=Regions" mbean |  Trivial | 
metrics, Operability |

http://git-wip-us.apache.org/repos/asf/hbase/blob/d8985e04/RELEASENOTES.md
--
diff --git a/RELEASENOTES.md b/RELEASENOTES.md
index e79e60a..8fad483 100644
--- a/RELEASENOTES.md
+++ b/RELEASENOTES.md
@@ -45,6 +45,11 @@ comment to preserve continuity of the CHANGELOG.
 
 These release notes cover new developer and user-facing incompatibilities, 
important issues, features, and major improvements.
 
+---
+
+* [HBASE-20464](https://issues.apache.org/jira/browse/HBASE-20464) | *Major* | 
**Disable IMC**
+
+Change the default so that on creation of new tables, In-Memory Compaction 
BASIC is NOT enabled.
 
 ---
 
@@ -57,7 +62,6 @@ The command line option `--return-values` is no longer acted 
on by the shell sin
 
 Users who wish to maintain the behavior seen in the 1.4.0-1.4.2 releases of 
the HBase shell should refer to the section _irbrc_ in the reference guide for 
how to configure their IRB session to avoid echoing expression results to the 
console.
 
-
 ---
 
 * [HBASE-18792](https://issues.apache.org/jira/browse/HBASE-18792) | *Blocker* 
| **hbase-2 needs to defend against hbck operations**



hbase git commit: HBASE-20464 Disable IMC Signed-off-by: Sean Busbey <bus...@apache.org>

2018-04-20 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 2795e8bcf -> add977be5


HBASE-20464 Disable IMC
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/add977be
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/add977be
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/add977be

Branch: refs/heads/branch-2.0
Commit: add977be508b876ecc8dd980e63cd26b8bc32ef5
Parents: 2795e8b
Author: Michael Stack 
Authored: Fri Apr 20 07:01:56 2018 -0700
Committer: Michael Stack 
Committed: Fri Apr 20 08:53:01 2018 -0700

--
 .../src/main/resources/hbase-default.xml| 20 +---
 .../asciidoc/_chapters/inmemory_compaction.adoc | 11 +--
 2 files changed, 22 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/add977be/hbase-common/src/main/resources/hbase-default.xml
--
diff --git a/hbase-common/src/main/resources/hbase-default.xml 
b/hbase-common/src/main/resources/hbase-default.xml
index d7e4476..b3769c9 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -316,12 +316,26 @@ possible configurations would overwhelm and obscure the 
important.
 
   
   
+hbase.hregion.compacting.memstore.type
+NONE
+Determines the type of memstore to be used by user tables on
+  creation. By default it is NONE and so we use the default memstore. To 
enable
+  compacting memstore on creation of new user tables, set this property to
+  BASIC/EAGER/ADAPTIVE. All subsequent table creations will have the
+  new value of this attribute.
+  See http://hbase.apache.org/book.html#inmemory_compaction for more on
+  In-memory Compaction.
+
+  
+  
 hbase.systemtables.compacting.memstore.type
 NONE
 Determines the type of memstore to be used for system tables 
like
-  META, namespace tables etc. By default NONE is the type and hence we use 
the
-  default memstore for all the system tables. If we need to use compacting
-  memstore for system tables then set this property to BASIC/EAGER
+  META, namespace tables etc., on creation. By default NONE is the type 
and hence
+  we use the default memstore for all the system tables. If we need to 
have compacting
+  memstore on creation of system tables then set this property to 
BASIC/EAGER
+  See http://hbase.apache.org/book.html#inmemory_compaction for more on
+  In-memory Compaction.
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase/blob/add977be/src/main/asciidoc/_chapters/inmemory_compaction.adoc
--
diff --git a/src/main/asciidoc/_chapters/inmemory_compaction.adoc 
b/src/main/asciidoc/_chapters/inmemory_compaction.adoc
index da3abb1..af68e45 100644
--- a/src/main/asciidoc/_chapters/inmemory_compaction.adoc
+++ b/src/main/asciidoc/_chapters/inmemory_compaction.adoc
@@ -54,7 +54,7 @@ To enable in-memory compactions, set the 
_IN_MEMORY_COMPACTION_ attribute
 on per column family where you want the behavior. The _IN_MEMORY_COMPACTION_
 attribute can have one of three values.
 
- * _NONE_: No in-memory compaction.
+ * _NONE_: No in-memory compaction. The default.
  * _BASIC_: Basic policy enables flushing and keeps a pipeline of flushes 
until we trip the pipeline maximum threshold and then we flush to disk. No 
in-memory compaction but can help throughput as data is moved from the 
profligate, native ConcurrentSkipListMap data-type to more compact (and 
efficient) data types.
  * _EAGER_: This is _BASIC_ policy plus in-memory compaction of flushes (much 
like the on-disk compactions done to hfiles); on compaction we apply on-disk 
rules eliminating versions, duplicates, ttl'd cells, etc.
  * _ADAPTIVE_: Adaptive compaction adapts to the workload. It applies either 
index compaction or data compaction based on the ratio of duplicate cells in 
the data.  Experimental.
@@ -84,13 +84,12 @@ Note how the IN_MEMORY_COMPACTION attribute shows as part 
of the _METADATA_ map.
 
 There is also a global configuration, _hbase.hregion.compacting.memstore.type_ 
which you can set in your _hbase-site.xml_ file. Use it to set the
 default on creation of a new table (On creation of a column family Store, we 
look first to the column family configuration looking for the
-_IN_MEMORY_COMPACTION_ setting, and if none, we then consult the 
_hbase.hregion.compacting.memstore.type_ value using its content; default is
-_BASIC_).
+_IN_MEMORY_COMPACTION_ setting, and if none, we then consult the 
_hbase.hregion.compacting.memstore.type_ value using its content).
 
-By default, 
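
For reference on the feature whose default is changed above: a minimal sketch of enabling BASIC in-memory compaction for a single column family at table-creation time. This is not part of the commit; it assumes the HBase 2.x client API, and the table/family names are made up. The per-family IN_MEMORY_COMPACTION attribute set here overrides the hbase.hregion.compacting.memstore.type cluster default described in the hbase-default.xml hunk.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.MemoryCompactionPolicy;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class InMemoryCompactionExample {
  public static void main(String[] args) throws Exception {
    // Reads hbase-site.xml from the classpath; connects to the configured cluster.
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
         Admin admin = conn.getAdmin()) {
      admin.createTable(TableDescriptorBuilder.newBuilder(TableName.valueOf("example_table"))
          .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"))
              // With this commit the global default is NONE, so BASIC is now opt-in.
              .setInMemoryCompaction(MemoryCompactionPolicy.BASIC)
              .build())
          .build());
    }
  }
}
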

[2/2] hbase git commit: HBASE-18862 apply HBASE-15109 to branch-1.2, branch-1.3

2018-04-20 Thread busbey
HBASE-18862 apply HBASE-15109 to branch-1.2,branch-1.3

Signed-off-by: Pankaj Kumar 
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/42471ddf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/42471ddf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/42471ddf

Branch: refs/heads/branch-1.2
Commit: 42471ddf1515a4715b505b4e46774572965c5835
Parents: ff1c816
Author: Yechao Chen 
Authored: Fri Sep 22 16:08:18 2017 +0800
Committer: Sean Busbey 
Committed: Fri Apr 20 10:36:42 2018 -0500

--
 .../org/apache/hadoop/hbase/regionserver/ShutdownHook.java  | 9 +
 1 file changed, 5 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/42471ddf/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
index 25ff51c..f19f26f 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
@@ -200,10 +200,11 @@ public class ShutdownHook {
 throw new RuntimeException("Client finalizer is null, can't 
suppress!");
   }
   synchronized (fsShutdownHooks) {
-if (!fsShutdownHooks.containsKey(hdfsClientFinalizer) &&
-!ShutdownHookManager.deleteShutdownHook(hdfsClientFinalizer)) {
-  throw new RuntimeException("Failed suppression of fs shutdown hook: 
" +
-hdfsClientFinalizer);
+boolean isFSCacheDisabled = 
fs.getConf().getBoolean("fs.hdfs.impl.disable.cache", false);
+if (!isFSCacheDisabled && 
!fsShutdownHooks.containsKey(hdfsClientFinalizer)
+&& !ShutdownHookManager.deleteShutdownHook(hdfsClientFinalizer)) {
+  throw new RuntimeException(
+  "Failed suppression of fs shutdown hook: " + 
hdfsClientFinalizer);
 }
 Integer refs = fsShutdownHooks.get(hdfsClientFinalizer);
 fsShutdownHooks.put(hdfsClientFinalizer, refs == null ? 1 : refs + 1);



[1/2] hbase git commit: HBASE-18862 apply HBASE-15109 to branch-1.2, branch-1.3

2018-04-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 ff1c816ff -> 42471ddf1
  refs/heads/branch-1.3 2f27482b8 -> 5893c7784


HBASE-18862 apply HBASE-15109 to branch-1.2,branch-1.3

Signed-off-by: Pankaj Kumar 
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5893c778
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5893c778
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5893c778

Branch: refs/heads/branch-1.3
Commit: 5893c778489ee6d3b06fd87f5bfcee7e58cc5ba5
Parents: 2f27482
Author: Yechao Chen 
Authored: Fri Sep 22 16:08:18 2017 +0800
Committer: Sean Busbey 
Committed: Fri Apr 20 10:23:51 2018 -0500

--
 .../org/apache/hadoop/hbase/regionserver/ShutdownHook.java  | 9 +
 1 file changed, 5 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5893c778/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
index 25ff51c..f19f26f 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ShutdownHook.java
@@ -200,10 +200,11 @@ public class ShutdownHook {
 throw new RuntimeException("Client finalizer is null, can't 
suppress!");
   }
   synchronized (fsShutdownHooks) {
-if (!fsShutdownHooks.containsKey(hdfsClientFinalizer) &&
-!ShutdownHookManager.deleteShutdownHook(hdfsClientFinalizer)) {
-  throw new RuntimeException("Failed suppression of fs shutdown hook: 
" +
-hdfsClientFinalizer);
+boolean isFSCacheDisabled = 
fs.getConf().getBoolean("fs.hdfs.impl.disable.cache", false);
+if (!isFSCacheDisabled && 
!fsShutdownHooks.containsKey(hdfsClientFinalizer)
+&& !ShutdownHookManager.deleteShutdownHook(hdfsClientFinalizer)) {
+  throw new RuntimeException(
+  "Failed suppression of fs shutdown hook: " + 
hdfsClientFinalizer);
 }
 Integer refs = fsShutdownHooks.get(hdfsClientFinalizer);
 fsShutdownHooks.put(hdfsClientFinalizer, refs == null ? 1 : refs + 1);
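
As a usage note on the ShutdownHook change applied above to branch-1.2 and branch-1.3 (not part of the patch itself): the new guard only matters when the HDFS FileSystem cache has been disabled, which is done through plain Hadoop configuration. A minimal sketch, assuming only the HBase/Hadoop client classes on the classpath:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class FsCacheDisabledExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // When the cache is disabled there is no shared client finalizer hook to suppress,
    // which is the case the patched ShutdownHook now checks before throwing.
    conf.setBoolean("fs.hdfs.impl.disable.cache", true);
    try (FileSystem fs = FileSystem.get(conf)) {
      System.out.println("fs.hdfs.impl.disable.cache = "
          + fs.getConf().getBoolean("fs.hdfs.impl.disable.cache", false));
    }
  }
}
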



[27/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
Published site at 914de1141699142bce1486468a742233d9440b23.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/1facf1d3
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/1facf1d3
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/1facf1d3

Branch: refs/heads/asf-site
Commit: 1facf1d3a2f9f2ac8acde077e1816c9783f88f87
Parents: 12c47ed
Author: jenkins 
Authored: Fri Apr 20 14:46:29 2018 +
Committer: jenkins 
Committed: Fri Apr 20 14:46:29 2018 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 33165 +
 .../hadoop/hbase/client/RowMutations.html   |28 +-
 .../hadoop/hbase/client/RowMutations.html   |   317 +-
 .../org/apache/hadoop/hbase/net/Address.html| 2 +-
 .../hbase/util/Bytes.ByteArrayComparator.html   |24 +-
 .../hbase/util/Bytes.RowEndKeyComparator.html   |24 +-
 .../org/apache/hadoop/hbase/util/Bytes.html |24 +-
 book.html   |   214 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   |10 +-
 coc.html| 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/constant-values.html | 6 +-
 .../hbase/backup/master/BackupLogCleaner.html   |20 +-
 .../hadoop/hbase/backup/package-tree.html   | 4 +-
 .../hadoop/hbase/client/RowMutations.html   |32 +-
 .../hadoop/hbase/client/package-tree.html   |18 +-
 .../hadoop/hbase/executor/package-tree.html | 2 +-
 .../hadoop/hbase/filter/package-tree.html   |10 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 6 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 2 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 4 +-
 .../hbase/master/balancer/package-tree.html | 2 +-
 .../hadoop/hbase/master/package-tree.html   | 4 +-
 .../hbase/master/procedure/package-tree.html| 4 +-
 .../ClientZKSyncer.ClientZkUpdater.html |10 +-
 .../hbase/master/zksyncer/ClientZKSyncer.html   |34 +-
 .../org/apache/hadoop/hbase/package-tree.html   |16 +-
 .../hadoop/hbase/procedure2/package-tree.html   | 4 +-
 ...QuotaSnapshotSizeSerializationException.html | 6 +-
 ...leArchiverNotifierImpl.SnapshotWithSize.html |18 +-
 ...ArchiverNotifierImpl.StoreFileReference.html |20 +-
 .../hbase/quotas/FileArchiverNotifierImpl.html  |58 +-
 .../hbase/quotas/RegionSizeReportingChore.html  |38 +-
 .../hbase/quotas/RegionSizeStoreImpl.html   |30 +-
 .../hadoop/hbase/quotas/package-tree.html   | 6 +-
 .../HRegion.FlushResult.Result.html | 4 +-
 .../hadoop/hbase/regionserver/package-tree.html |18 +-
 .../regionserver/querymatcher/package-tree.html | 2 +-
 .../throttle/StoreHotnessProtector.html |46 +-
 .../hbase/security/access/package-tree.html | 2 +-
 .../hadoop/hbase/security/package-tree.html | 2 +-
 .../apache/hadoop/hbase/util/package-tree.html  |10 +-
 .../apache/hadoop/hbase/wal/package-tree.html   | 2 +-
 .../org/apache/hadoop/hbase/Version.html| 6 +-
 .../hbase/backup/master/BackupLogCleaner.html   |   235 +-
 .../hadoop/hbase/client/RowMutations.html   |   317 +-
 .../ClientZKSyncer.ClientZkUpdater.html |   429 +-
 .../hbase/master/zksyncer/ClientZKSyncer.html   |   429 +-
 .../org/apache/hadoop/hbase/net/Address.html| 2 +-
 ...ileArchiverNotifierFactoryImpl.CacheKey.html | 2 +-
 .../quotas/FileArchiverNotifierFactoryImpl.html | 2 +-
 ...QuotaSnapshotSizeSerializationException.html |  1197 +-
 ...leArchiverNotifierImpl.SnapshotWithSize.html |  1197 +-
 ...ArchiverNotifierImpl.StoreFileReference.html |  1197 +-
 .../hbase/quotas/FileArchiverNotifierImpl.html  |  1197 +-
 .../hbase/quotas/RegionSizeReportingChore.html  |   263 +-
 .../hbase/quotas/RegionSizeStoreImpl.html   |   167 +-
 .../HRegion.BatchOperation.Visitor.html |   236 +-
 .../regionserver/HRegion.BatchOperation.html|   236 +-
 .../regionserver/HRegion.BulkLoadListener.html  |   236 +-
 .../HRegion.FlushResult.Result.html |   236 +-
 .../hbase/regionserver/HRegion.FlushResult.html |   236 +-
 .../regionserver/HRegion.FlushResultImpl.html   |   236 +-
 .../HRegion.MutationBatchOperation.html |   236 +-
 .../HRegion.ObservedExceptionsInBatch.html  |   236 +-
 .../HRegion.PrepareFlushResult.html |   236 +-
 .../regionserver/HRegion.RegionScannerImpl.html |   236 

[14/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
index 7137829..4a879bb 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.BatchOperation.html
@@ -78,124 +78,124 @@
 070import 
java.util.concurrent.locks.ReadWriteLock;
 071import 
java.util.concurrent.locks.ReentrantReadWriteLock;
 072import java.util.function.Function;
-073import 
org.apache.commons.collections.CollectionUtils;
-074import 
org.apache.hadoop.conf.Configuration;
-075import org.apache.hadoop.fs.FileStatus;
-076import org.apache.hadoop.fs.FileSystem;
-077import 
org.apache.hadoop.fs.LocatedFileStatus;
-078import org.apache.hadoop.fs.Path;
-079import org.apache.hadoop.hbase.Cell;
-080import 
org.apache.hadoop.hbase.CellBuilderType;
-081import 
org.apache.hadoop.hbase.CellComparator;
-082import 
org.apache.hadoop.hbase.CellComparatorImpl;
-083import 
org.apache.hadoop.hbase.CellScanner;
-084import 
org.apache.hadoop.hbase.CellUtil;
-085import 
org.apache.hadoop.hbase.CompareOperator;
-086import 
org.apache.hadoop.hbase.CompoundConfiguration;
-087import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-088import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-089import 
org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
-090import 
org.apache.hadoop.hbase.HConstants;
-091import 
org.apache.hadoop.hbase.HConstants.OperationStatusCode;
-092import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-093import 
org.apache.hadoop.hbase.KeyValue;
-094import 
org.apache.hadoop.hbase.KeyValueUtil;
-095import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-096import 
org.apache.hadoop.hbase.NotServingRegionException;
-097import 
org.apache.hadoop.hbase.PrivateCellUtil;
-098import 
org.apache.hadoop.hbase.RegionTooBusyException;
-099import 
org.apache.hadoop.hbase.TableName;
-100import org.apache.hadoop.hbase.Tag;
-101import org.apache.hadoop.hbase.TagUtil;
-102import 
org.apache.hadoop.hbase.UnknownScannerException;
-103import 
org.apache.hadoop.hbase.client.Append;
-104import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-105import 
org.apache.hadoop.hbase.client.CompactionState;
-106import 
org.apache.hadoop.hbase.client.Delete;
-107import 
org.apache.hadoop.hbase.client.Durability;
-108import 
org.apache.hadoop.hbase.client.Get;
-109import 
org.apache.hadoop.hbase.client.Increment;
-110import 
org.apache.hadoop.hbase.client.IsolationLevel;
-111import 
org.apache.hadoop.hbase.client.Mutation;
-112import 
org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
-113import 
org.apache.hadoop.hbase.client.Put;
-114import 
org.apache.hadoop.hbase.client.RegionInfo;
-115import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-116import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-117import 
org.apache.hadoop.hbase.client.Result;
-118import 
org.apache.hadoop.hbase.client.RowMutations;
-119import 
org.apache.hadoop.hbase.client.Scan;
-120import 
org.apache.hadoop.hbase.client.TableDescriptor;
-121import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-122import 
org.apache.hadoop.hbase.conf.ConfigurationManager;
-123import 
org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
-124import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-125import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
-126import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-127import 
org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-128import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-129import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-130import 
org.apache.hadoop.hbase.filter.FilterWrapper;
-131import 
org.apache.hadoop.hbase.filter.IncompatibleFilterException;
-132import 
org.apache.hadoop.hbase.io.HFileLink;
-133import 
org.apache.hadoop.hbase.io.HeapSize;
-134import 
org.apache.hadoop.hbase.io.TimeRange;
-135import 
org.apache.hadoop.hbase.io.hfile.HFile;
-136import 
org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
-137import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-138import 
org.apache.hadoop.hbase.ipc.RpcCall;
-139import 
org.apache.hadoop.hbase.ipc.RpcServer;
-140import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-141import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-142import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-143import 
org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
-144import 
org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
-145import 

hbase-site git commit: INFRA-10751 Empty commit

2018-04-20 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 1facf1d3a -> 84a296a43


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/84a296a4
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/84a296a4
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/84a296a4

Branch: refs/heads/asf-site
Commit: 84a296a433e062a688e7bdd3f6d586dc9a9e0835
Parents: 1facf1d
Author: jenkins 
Authored: Fri Apr 20 14:46:44 2018 +
Committer: jenkins 
Committed: Fri Apr 20 14:46:44 2018 +

--

--




[13/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
index 7137829..4a879bb 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.FlushResult.html
@@ -78,124 +78,124 @@
 070import 
java.util.concurrent.locks.ReadWriteLock;
 071import 
java.util.concurrent.locks.ReentrantReadWriteLock;
 072import java.util.function.Function;
-073import 
org.apache.commons.collections.CollectionUtils;
-074import 
org.apache.hadoop.conf.Configuration;
-075import org.apache.hadoop.fs.FileStatus;
-076import org.apache.hadoop.fs.FileSystem;
-077import 
org.apache.hadoop.fs.LocatedFileStatus;
-078import org.apache.hadoop.fs.Path;
-079import org.apache.hadoop.hbase.Cell;
-080import 
org.apache.hadoop.hbase.CellBuilderType;
-081import 
org.apache.hadoop.hbase.CellComparator;
-082import 
org.apache.hadoop.hbase.CellComparatorImpl;
-083import 
org.apache.hadoop.hbase.CellScanner;
-084import 
org.apache.hadoop.hbase.CellUtil;
-085import 
org.apache.hadoop.hbase.CompareOperator;
-086import 
org.apache.hadoop.hbase.CompoundConfiguration;
-087import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-088import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-089import 
org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
-090import 
org.apache.hadoop.hbase.HConstants;
-091import 
org.apache.hadoop.hbase.HConstants.OperationStatusCode;
-092import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-093import 
org.apache.hadoop.hbase.KeyValue;
-094import 
org.apache.hadoop.hbase.KeyValueUtil;
-095import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-096import 
org.apache.hadoop.hbase.NotServingRegionException;
-097import 
org.apache.hadoop.hbase.PrivateCellUtil;
-098import 
org.apache.hadoop.hbase.RegionTooBusyException;
-099import 
org.apache.hadoop.hbase.TableName;
-100import org.apache.hadoop.hbase.Tag;
-101import org.apache.hadoop.hbase.TagUtil;
-102import 
org.apache.hadoop.hbase.UnknownScannerException;
-103import 
org.apache.hadoop.hbase.client.Append;
-104import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-105import 
org.apache.hadoop.hbase.client.CompactionState;
-106import 
org.apache.hadoop.hbase.client.Delete;
-107import 
org.apache.hadoop.hbase.client.Durability;
-108import 
org.apache.hadoop.hbase.client.Get;
-109import 
org.apache.hadoop.hbase.client.Increment;
-110import 
org.apache.hadoop.hbase.client.IsolationLevel;
-111import 
org.apache.hadoop.hbase.client.Mutation;
-112import 
org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
-113import 
org.apache.hadoop.hbase.client.Put;
-114import 
org.apache.hadoop.hbase.client.RegionInfo;
-115import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-116import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-117import 
org.apache.hadoop.hbase.client.Result;
-118import 
org.apache.hadoop.hbase.client.RowMutations;
-119import 
org.apache.hadoop.hbase.client.Scan;
-120import 
org.apache.hadoop.hbase.client.TableDescriptor;
-121import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-122import 
org.apache.hadoop.hbase.conf.ConfigurationManager;
-123import 
org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
-124import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-125import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
-126import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-127import 
org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-128import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-129import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-130import 
org.apache.hadoop.hbase.filter.FilterWrapper;
-131import 
org.apache.hadoop.hbase.filter.IncompatibleFilterException;
-132import 
org.apache.hadoop.hbase.io.HFileLink;
-133import 
org.apache.hadoop.hbase.io.HeapSize;
-134import 
org.apache.hadoop.hbase.io.TimeRange;
-135import 
org.apache.hadoop.hbase.io.hfile.HFile;
-136import 
org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
-137import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-138import 
org.apache.hadoop.hbase.ipc.RpcCall;
-139import 
org.apache.hadoop.hbase.ipc.RpcServer;
-140import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-141import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-142import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-143import 
org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
-144import 
org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
-145import 

[05/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.SinkWriter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.SinkWriter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.SinkWriter.html
index 9f6a1bd..99f53c4 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.SinkWriter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.SinkWriter.html
@@ -54,55 +54,55 @@
 046import 
java.util.concurrent.atomic.AtomicReference;
 047import java.util.regex.Matcher;
 048import java.util.regex.Pattern;
-049import 
org.apache.commons.collections.CollectionUtils;
-050import 
org.apache.commons.collections.MapUtils;
-051import 
org.apache.commons.lang3.ArrayUtils;
-052import 
org.apache.hadoop.conf.Configuration;
-053import 
org.apache.hadoop.fs.FileAlreadyExistsException;
-054import org.apache.hadoop.fs.FileStatus;
-055import org.apache.hadoop.fs.FileSystem;
-056import org.apache.hadoop.fs.Path;
-057import org.apache.hadoop.fs.PathFilter;
-058import org.apache.hadoop.hbase.Cell;
-059import 
org.apache.hadoop.hbase.CellScanner;
-060import 
org.apache.hadoop.hbase.CellUtil;
-061import 
org.apache.hadoop.hbase.HBaseConfiguration;
-062import 
org.apache.hadoop.hbase.HConstants;
-063import 
org.apache.hadoop.hbase.TableName;
-064import 
org.apache.hadoop.hbase.client.Delete;
-065import 
org.apache.hadoop.hbase.client.Durability;
-066import 
org.apache.hadoop.hbase.client.Mutation;
-067import 
org.apache.hadoop.hbase.client.Put;
-068import 
org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;
-069import 
org.apache.hadoop.hbase.io.HeapSize;
-070import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-071import 
org.apache.hadoop.hbase.master.SplitLogManager;
-072import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-073import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-074import 
org.apache.hadoop.hbase.regionserver.HRegion;
-075import 
org.apache.hadoop.hbase.regionserver.LastSequenceId;
-076import 
org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
-077import 
org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
-078import 
org.apache.hadoop.hbase.util.Bytes;
-079import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-080import 
org.apache.hadoop.hbase.util.ClassSize;
-081import 
org.apache.hadoop.hbase.util.FSUtils;
-082import 
org.apache.hadoop.hbase.util.Pair;
-083import 
org.apache.hadoop.hbase.util.Threads;
-084import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-085import 
org.apache.hadoop.hbase.wal.WAL.Reader;
-086import 
org.apache.hadoop.hbase.wal.WALProvider.Writer;
-087import 
org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
-088import 
org.apache.hadoop.io.MultipleIOException;
-089import 
org.apache.hadoop.ipc.RemoteException;
-090import 
org.apache.yetus.audience.InterfaceAudience;
-091import org.slf4j.Logger;
-092import org.slf4j.LoggerFactory;
-093
-094import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-095import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-096import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-097import 
org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;
+049import 
org.apache.commons.lang3.ArrayUtils;
+050import 
org.apache.hadoop.conf.Configuration;
+051import 
org.apache.hadoop.fs.FileAlreadyExistsException;
+052import org.apache.hadoop.fs.FileStatus;
+053import org.apache.hadoop.fs.FileSystem;
+054import org.apache.hadoop.fs.Path;
+055import org.apache.hadoop.fs.PathFilter;
+056import org.apache.hadoop.hbase.Cell;
+057import 
org.apache.hadoop.hbase.CellScanner;
+058import 
org.apache.hadoop.hbase.CellUtil;
+059import 
org.apache.hadoop.hbase.HBaseConfiguration;
+060import 
org.apache.hadoop.hbase.HConstants;
+061import 
org.apache.hadoop.hbase.TableName;
+062import 
org.apache.hadoop.hbase.client.Delete;
+063import 
org.apache.hadoop.hbase.client.Durability;
+064import 
org.apache.hadoop.hbase.client.Mutation;
+065import 
org.apache.hadoop.hbase.client.Put;
+066import 
org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;
+067import 
org.apache.hadoop.hbase.io.HeapSize;
+068import 
org.apache.hadoop.hbase.log.HBaseMarkers;
+069import 
org.apache.hadoop.hbase.master.SplitLogManager;
+070import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
+071import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
+072import 
org.apache.hadoop.hbase.regionserver.HRegion;
+073import 
org.apache.hadoop.hbase.regionserver.LastSequenceId;
+074import 
org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
+075import 
org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
+076import 
org.apache.hadoop.hbase.util.Bytes;
+077import 
org.apache.hadoop.hbase.util.CancelableProgressable;
+078import 
org.apache.hadoop.hbase.util.ClassSize;
+079import 

[02/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html
index c06f057..265908c 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.CreateFailObserver.html
@@ -31,136 +31,137 @@
 023import java.io.IOException;
 024import java.util.List;
 025import java.util.Optional;
-026import org.apache.commons.logging.Log;
-027import 
org.apache.commons.logging.LogFactory;
-028import 
org.apache.hadoop.conf.Configuration;
-029import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-030import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-031import 
org.apache.hadoop.hbase.TableName;
-032import 
org.apache.hadoop.hbase.client.RegionInfo;
-033import 
org.apache.hadoop.hbase.client.TableDescriptor;
-034import 
org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
-035import 
org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
-036import 
org.apache.hadoop.hbase.coprocessor.MasterObserver;
-037import 
org.apache.hadoop.hbase.coprocessor.ObserverContext;
-038import 
org.apache.hadoop.hbase.procedure2.Procedure;
-039import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-040import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-041import 
org.apache.hadoop.hbase.util.Bytes;
-042import org.junit.After;
-043import org.junit.BeforeClass;
-044import org.junit.ClassRule;
-045import org.junit.Test;
-046import 
org.junit.experimental.categories.Category;
-047
-048import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
-049
-050/**
-051 * Check if CompletedProcedureCleaner 
cleans up failed nonce procedures.
-052 */
-053@Category(MediumTests.class)
-054public class TestFailedProcCleanup {
-055
-056  @ClassRule
-057  public static final HBaseClassTestRule 
CLASS_RULE =
-058  
HBaseClassTestRule.forClass(TestFailedProcCleanup.class);
-059
-060  private static final Log LOG = 
LogFactory.getLog(TestFailedProcCleanup.class);
-061
-062  protected static HBaseTestingUtility 
TEST_UTIL = new HBaseTestingUtility();
-063  private static Configuration conf;
-064  private static final TableName TABLE = 
TableName.valueOf("test");
-065  private static final byte[] FAMILY = 
Bytes.toBytesBinary("f");
-066  private static final int evictionDelay 
= 10 * 1000;
-067
-068  @BeforeClass
-069  public static void setUpBeforeClass() 
{
-070conf = 
TEST_UTIL.getConfiguration();
-071
conf.setInt("hbase.procedure.cleaner.evict.ttl", evictionDelay);
-072
conf.setInt("hbase.procedure.cleaner.evict.batch.size", 1);
-073  }
-074
-075  @After
-076  public void tearDown() throws Exception 
{
-077TEST_UTIL.shutdownMiniCluster();
-078  }
-079
-080  @Test
-081  public void testFailCreateTable() 
throws Exception {
-082conf.set(MASTER_COPROCESSOR_CONF_KEY, 
CreateFailObserver.class.getName());
-083TEST_UTIL.startMiniCluster(3);
-084try {
-085  TEST_UTIL.createTable(TABLE, 
FAMILY);
-086} catch (AccessDeniedException e) {
-087  LOG.debug("Ignoring exception: ", 
e);
-088  Thread.sleep(evictionDelay * 3);
-089}
-090    List<Procedure<?>> procedureInfos =
-091        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();
-092    for (Procedure procedureInfo : procedureInfos) {
-093      if (procedureInfo.getProcName().equals("CreateTableProcedure")
-094          && procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {
-095        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");
-096      }
-097    }
-098  }
-099
-100  @Test
-101  public void testFailCreateTableAction() 
throws Exception {
-102conf.set(MASTER_COPROCESSOR_CONF_KEY, 
CreateFailObserverHandler.class.getName());
-103TEST_UTIL.startMiniCluster(3);
-104try {
-105  TEST_UTIL.createTable(TABLE, 
FAMILY);
-106  fail("Table shouldn't be 
created");
-107} catch (AccessDeniedException e) {
-108  LOG.debug("Ignoring exception: ", 
e);
-109  Thread.sleep(evictionDelay * 3);
-110}
-111    List<Procedure<?>> procedureInfos =
-112        TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterProcedureExecutor().getProcedures();
-113    for (Procedure procedureInfo : procedureInfos) {
-114      if (procedureInfo.getProcName().equals("CreateTableProcedure")
-115          && procedureInfo.getState() == ProcedureProtos.ProcedureState.ROLLEDBACK) {
-116        fail("Found procedure " + procedureInfo + " that hasn't been cleaned up");
-117      }
-118    }
-119  }
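
The generated page above is truncated before the nested CreateFailObserver class it documents. As a hedged sketch (reconstructed for illustration against the 2.x coprocessor API, not copied from the truncated page), such an observer simply rejects every table creation so the test can check that the rolled-back CreateTableProcedure is later evicted:

import java.io.IOException;
import java.util.Optional;

import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.security.AccessDeniedException;

public class FailingCreateObserver implements MasterCoprocessor, MasterObserver {
  @Override
  public Optional<MasterObserver> getMasterObserver() {
    return Optional.of(this);
  }

  @Override
  public void preCreateTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
      TableDescriptor desc, RegionInfo[] regions) throws IOException {
    // Fail every create so the procedure rolls back and is left for the cleaner chore.
    throw new AccessDeniedException("creating table is disallowed in this test");
  }
}
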

[06/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
index 9f6a1bd..99f53c4 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/wal/WALSplitter.CorruptedLogFileException.html
@@ -54,55 +54,55 @@
 046import 
java.util.concurrent.atomic.AtomicReference;
 047import java.util.regex.Matcher;
 048import java.util.regex.Pattern;
-049import 
org.apache.commons.collections.CollectionUtils;
-050import 
org.apache.commons.collections.MapUtils;
-051import 
org.apache.commons.lang3.ArrayUtils;
-052import 
org.apache.hadoop.conf.Configuration;
-053import 
org.apache.hadoop.fs.FileAlreadyExistsException;
-054import org.apache.hadoop.fs.FileStatus;
-055import org.apache.hadoop.fs.FileSystem;
-056import org.apache.hadoop.fs.Path;
-057import org.apache.hadoop.fs.PathFilter;
-058import org.apache.hadoop.hbase.Cell;
-059import 
org.apache.hadoop.hbase.CellScanner;
-060import 
org.apache.hadoop.hbase.CellUtil;
-061import 
org.apache.hadoop.hbase.HBaseConfiguration;
-062import 
org.apache.hadoop.hbase.HConstants;
-063import 
org.apache.hadoop.hbase.TableName;
-064import 
org.apache.hadoop.hbase.client.Delete;
-065import 
org.apache.hadoop.hbase.client.Durability;
-066import 
org.apache.hadoop.hbase.client.Mutation;
-067import 
org.apache.hadoop.hbase.client.Put;
-068import 
org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;
-069import 
org.apache.hadoop.hbase.io.HeapSize;
-070import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-071import 
org.apache.hadoop.hbase.master.SplitLogManager;
-072import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-073import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-074import 
org.apache.hadoop.hbase.regionserver.HRegion;
-075import 
org.apache.hadoop.hbase.regionserver.LastSequenceId;
-076import 
org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
-077import 
org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
-078import 
org.apache.hadoop.hbase.util.Bytes;
-079import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-080import 
org.apache.hadoop.hbase.util.ClassSize;
-081import 
org.apache.hadoop.hbase.util.FSUtils;
-082import 
org.apache.hadoop.hbase.util.Pair;
-083import 
org.apache.hadoop.hbase.util.Threads;
-084import 
org.apache.hadoop.hbase.wal.WAL.Entry;
-085import 
org.apache.hadoop.hbase.wal.WAL.Reader;
-086import 
org.apache.hadoop.hbase.wal.WALProvider.Writer;
-087import 
org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
-088import 
org.apache.hadoop.io.MultipleIOException;
-089import 
org.apache.hadoop.ipc.RemoteException;
-090import 
org.apache.yetus.audience.InterfaceAudience;
-091import org.slf4j.Logger;
-092import org.slf4j.LoggerFactory;
-093
-094import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-095import 
org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-096import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-097import 
org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;
+049import 
org.apache.commons.lang3.ArrayUtils;
+050import 
org.apache.hadoop.conf.Configuration;
+051import 
org.apache.hadoop.fs.FileAlreadyExistsException;
+052import org.apache.hadoop.fs.FileStatus;
+053import org.apache.hadoop.fs.FileSystem;
+054import org.apache.hadoop.fs.Path;
+055import org.apache.hadoop.fs.PathFilter;
+056import org.apache.hadoop.hbase.Cell;
+057import 
org.apache.hadoop.hbase.CellScanner;
+058import 
org.apache.hadoop.hbase.CellUtil;
+059import 
org.apache.hadoop.hbase.HBaseConfiguration;
+060import 
org.apache.hadoop.hbase.HConstants;
+061import 
org.apache.hadoop.hbase.TableName;
+062import 
org.apache.hadoop.hbase.client.Delete;
+063import 
org.apache.hadoop.hbase.client.Durability;
+064import 
org.apache.hadoop.hbase.client.Mutation;
+065import 
org.apache.hadoop.hbase.client.Put;
+066import 
org.apache.hadoop.hbase.coordination.SplitLogWorkerCoordination;
+067import 
org.apache.hadoop.hbase.io.HeapSize;
+068import 
org.apache.hadoop.hbase.log.HBaseMarkers;
+069import 
org.apache.hadoop.hbase.master.SplitLogManager;
+070import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
+071import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
+072import 
org.apache.hadoop.hbase.regionserver.HRegion;
+073import 
org.apache.hadoop.hbase.regionserver.LastSequenceId;
+074import 
org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
+075import 
org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
+076import 
org.apache.hadoop.hbase.util.Bytes;
+077import 
org.apache.hadoop.hbase.util.CancelableProgressable;

[22/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html 
b/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
index 7b4bdcd..a5b07e4 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class RegionSizeReportingChore
+public class RegionSizeReportingChore
 extends ScheduledChore
 A Chore which sends the region size reports on this 
RegionServer to the Master.
 
@@ -141,7 +141,7 @@ extends Field and Description
 
 
-private static 
org.apache.commons.logging.Log
+private static org.slf4j.Logger
 LOG
 
 
@@ -280,7 +280,7 @@ extends 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -289,7 +289,7 @@ extends 
 
 REGION_SIZE_REPORTING_CHORE_PERIOD_KEY
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_SIZE_REPORTING_CHORE_PERIOD_KEY
+static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_SIZE_REPORTING_CHORE_PERIOD_KEY
 
 See Also:
 Constant
 Field Values
@@ -302,7 +302,7 @@ extends 
 
 REGION_SIZE_REPORTING_CHORE_PERIOD_DEFAULT
-static finalint REGION_SIZE_REPORTING_CHORE_PERIOD_DEFAULT
+static finalint REGION_SIZE_REPORTING_CHORE_PERIOD_DEFAULT
 
 See Also:
 Constant
 Field Values
@@ -315,7 +315,7 @@ extends 
 
 REGION_SIZE_REPORTING_CHORE_DELAY_KEY
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_SIZE_REPORTING_CHORE_DELAY_KEY
+static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_SIZE_REPORTING_CHORE_DELAY_KEY
 
 See Also:
 Constant
 Field Values
@@ -328,7 +328,7 @@ extends 
 
 REGION_SIZE_REPORTING_CHORE_DELAY_DEFAULT
-static finallong REGION_SIZE_REPORTING_CHORE_DELAY_DEFAULT
+static finallong REGION_SIZE_REPORTING_CHORE_DELAY_DEFAULT
 
 See Also:
 Constant
 Field Values
@@ -341,7 +341,7 @@ extends 
 
 REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY
+static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_SIZE_REPORTING_CHORE_TIMEUNIT_KEY
 
 See Also:
 Constant
 Field Values
@@ -354,7 +354,7 @@ extends 
 
 REGION_SIZE_REPORTING_CHORE_TIMEUNIT_DEFAULT
-static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_SIZE_REPORTING_CHORE_TIMEUNIT_DEFAULT
+static finalhttps://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String REGION_SIZE_REPORTING_CHORE_TIMEUNIT_DEFAULT
 
 
 
@@ -363,7 +363,7 @@ extends 
 
 rsServices
-private finalRegionServerServices rsServices
+private finalRegionServerServices rsServices
 
 
 
@@ -372,7 +372,7 @@ extends 
 
 metrics
-private finalMetricsRegionServer 
metrics
+private finalMetricsRegionServer 
metrics
 
 
 
@@ -389,7 +389,7 @@ extends 
 
 RegionSizeReportingChore
-publicRegionSizeReportingChore(RegionServerServicesrsServices)
+publicRegionSizeReportingChore(RegionServerServicesrsServices)
 
 
 
@@ -406,7 +406,7 @@ extends 
 
 chore
-protectedvoidchore()
+protectedvoidchore()
 Description copied from 
class:ScheduledChore
 The task to execute on each scheduled execution of the 
Chore
 
@@ -421,7 +421,7 @@ extends 
 
 _chore
-void_chore()
+void_chore()
 
 
 
@@ -430,7 +430,7 @@ extends 
 
 getOnlineRegionInfos
-https://docs.oracle.com/javase/8/docs/api/java/util/HashSet.html?is-external=true;
 title="class or interface in java.util">HashSetRegionInfogetOnlineRegionInfos(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends RegiononlineRegions)
+https://docs.oracle.com/javase/8/docs/api/java/util/HashSet.html?is-external=true;
 title="class or interface in java.util">HashSetRegionInfogetOnlineRegionInfos(https://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">List? extends RegiononlineRegions)
 
 
 
@@ -439,7 +439,7 @@ extends 
 
 removeNonOnlineRegions
-voidremoveNonOnlineRegions(RegionSizeStorestore,
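
The regenerated page above reflects RegionSizeReportingChore's LOG field moving from commons-logging to slf4j in the underlying source. For readers following that migration, a generic sketch of the pattern (the class and method below are illustrative only, not taken from this site build):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class Slf4jMigrationExample {
  // Before: private static final Log LOG = LogFactory.getLog(Slf4jMigrationExample.class);
  private static final Logger LOG = LoggerFactory.getLogger(Slf4jMigrationExample.class);

  void reportRegionSize(String region, long sizeInBytes) {
    // slf4j uses {} placeholders instead of string concatenation.
    LOG.debug("Region {} size is {} bytes", region, sizeInBytes);
  }
}
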

[11/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
index 7137829..4a879bb 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ReplayBatchOperation.html
@@ -78,124 +78,124 @@
 070import 
java.util.concurrent.locks.ReadWriteLock;
 071import 
java.util.concurrent.locks.ReentrantReadWriteLock;
 072import java.util.function.Function;
-073import 
org.apache.commons.collections.CollectionUtils;
-074import 
org.apache.hadoop.conf.Configuration;
-075import org.apache.hadoop.fs.FileStatus;
-076import org.apache.hadoop.fs.FileSystem;
-077import 
org.apache.hadoop.fs.LocatedFileStatus;
-078import org.apache.hadoop.fs.Path;
-079import org.apache.hadoop.hbase.Cell;
-080import 
org.apache.hadoop.hbase.CellBuilderType;
-081import 
org.apache.hadoop.hbase.CellComparator;
-082import 
org.apache.hadoop.hbase.CellComparatorImpl;
-083import 
org.apache.hadoop.hbase.CellScanner;
-084import 
org.apache.hadoop.hbase.CellUtil;
-085import 
org.apache.hadoop.hbase.CompareOperator;
-086import 
org.apache.hadoop.hbase.CompoundConfiguration;
-087import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-088import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-089import 
org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
-090import 
org.apache.hadoop.hbase.HConstants;
-091import 
org.apache.hadoop.hbase.HConstants.OperationStatusCode;
-092import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-093import 
org.apache.hadoop.hbase.KeyValue;
-094import 
org.apache.hadoop.hbase.KeyValueUtil;
-095import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-096import 
org.apache.hadoop.hbase.NotServingRegionException;
-097import 
org.apache.hadoop.hbase.PrivateCellUtil;
-098import 
org.apache.hadoop.hbase.RegionTooBusyException;
-099import 
org.apache.hadoop.hbase.TableName;
-100import org.apache.hadoop.hbase.Tag;
-101import org.apache.hadoop.hbase.TagUtil;
-102import 
org.apache.hadoop.hbase.UnknownScannerException;
-103import 
org.apache.hadoop.hbase.client.Append;
-104import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-105import 
org.apache.hadoop.hbase.client.CompactionState;
-106import 
org.apache.hadoop.hbase.client.Delete;
-107import 
org.apache.hadoop.hbase.client.Durability;
-108import 
org.apache.hadoop.hbase.client.Get;
-109import 
org.apache.hadoop.hbase.client.Increment;
-110import 
org.apache.hadoop.hbase.client.IsolationLevel;
-111import 
org.apache.hadoop.hbase.client.Mutation;
-112import 
org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
-113import 
org.apache.hadoop.hbase.client.Put;
-114import 
org.apache.hadoop.hbase.client.RegionInfo;
-115import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-116import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-117import 
org.apache.hadoop.hbase.client.Result;
-118import 
org.apache.hadoop.hbase.client.RowMutations;
-119import 
org.apache.hadoop.hbase.client.Scan;
-120import 
org.apache.hadoop.hbase.client.TableDescriptor;
-121import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-122import 
org.apache.hadoop.hbase.conf.ConfigurationManager;
-123import 
org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
-124import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-125import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
-126import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-127import 
org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-128import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-129import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-130import 
org.apache.hadoop.hbase.filter.FilterWrapper;
-131import 
org.apache.hadoop.hbase.filter.IncompatibleFilterException;
-132import 
org.apache.hadoop.hbase.io.HFileLink;
-133import 
org.apache.hadoop.hbase.io.HeapSize;
-134import 
org.apache.hadoop.hbase.io.TimeRange;
-135import 
org.apache.hadoop.hbase.io.hfile.HFile;
-136import 
org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
-137import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-138import 
org.apache.hadoop.hbase.ipc.RpcCall;
-139import 
org.apache.hadoop.hbase.ipc.RpcServer;
-140import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-141import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-142import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-143import 
org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
-144import 
org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
-145import 

[19/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html
index e080cd6..73a1036 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.QuotaSnapshotSizeSerializationException.html
@@ -42,604 +42,605 @@
 034import java.util.function.Predicate;
 035import java.util.stream.Collectors;
 036
-037import 
org.apache.commons.lang.builder.HashCodeBuilder;
-038import org.apache.commons.logging.Log;
-039import 
org.apache.commons.logging.LogFactory;
-040import 
org.apache.hadoop.conf.Configuration;
-041import org.apache.hadoop.fs.FileStatus;
-042import org.apache.hadoop.fs.FileSystem;
-043import org.apache.hadoop.fs.Path;
-044import 
org.apache.hadoop.hbase.TableName;
-045import 
org.apache.hadoop.hbase.client.Connection;
-046import 
org.apache.hadoop.hbase.client.Get;
-047import 
org.apache.hadoop.hbase.client.Put;
-048import 
org.apache.hadoop.hbase.client.Result;
-049import 
org.apache.hadoop.hbase.client.Table;
-050import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-051import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest;
-052import 
org.apache.hadoop.hbase.util.FSUtils;
-053import 
org.apache.hadoop.hbase.util.HFileArchiveUtil;
-054import 
org.apache.hadoop.util.StringUtils;
-055import 
org.apache.yetus.audience.InterfaceAudience;
-056
-057import 
org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap;
-058import 
org.apache.hbase.thirdparty.com.google.common.collect.Multimap;
-059import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-060
-061import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-062import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile;
-066
-067/**
-068 * Tracks file archiving and updates the 
hbase quota table.
-069 */
-070@InterfaceAudience.Private
-071public class FileArchiverNotifierImpl 
implements FileArchiverNotifier {
-072  private static final Log LOG = 
LogFactory.getLog(FileArchiverNotifierImpl.class);
-073  private final Connection conn;
-074  private final Configuration conf;
-075  private final FileSystem fs;
-076  private final TableName tn;
-077  private final ReadLock readLock;
-078  private final WriteLock writeLock;
-079  private volatile long lastFullCompute = 
Long.MIN_VALUE;
-080  private List<String> currentSnapshots = Collections.emptyList();
-081  private static final Map<String,Object> NAMESPACE_LOCKS = new HashMap<>();
-082
-083  /**
-084   * An Exception thrown when 
SnapshotSize updates to hbase:quota fail to be written.
-085   */
-086  @InterfaceAudience.Private
-087  public static class 
QuotaSnapshotSizeSerializationException extends IOException {
-088private static final long 
serialVersionUID = 1L;
-089
-090public 
QuotaSnapshotSizeSerializationException(String msg) {
-091  super(msg);
-092}
-093  }
-094
-095  public FileArchiverNotifierImpl(
-096  Connection conn, Configuration 
conf, FileSystem fs, TableName tn) {
-097this.conn = conn;
-098this.conf = conf;
-099this.fs = fs;
-100this.tn = tn;
-101ReentrantReadWriteLock lock = new 
ReentrantReadWriteLock();
-102readLock = lock.readLock();
-103writeLock = lock.writeLock();
-104  }
-105
-106  static synchronized Object getLockForNamespace(String namespace) {
-107    return NAMESPACE_LOCKS.computeIfAbsent(namespace, (ns) -> new Object());
-108  }
-109
-110  /**
-111   * Returns a strictly-increasing 
measure of time extracted by {@link System#nanoTime()}.
-112   */
-113  long getLastFullCompute() {
-114return lastFullCompute;
-115  }
-116
-117  @Override
-118  public void addArchivedFiles(Set<Entry<String, Long>> fileSizes) throws IOException {
-119long start = System.nanoTime();
-120readLock.lock();
-121try {
-122  // We want to catch the case where 
we got an archival request, but there was a full
-123  // re-computation in progress that 
was blocking us. Most likely, the full computation is going
-124  // to already include the changes 
we were going to make.
-125  //
-126  // Same as "start 
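
The getLockForNamespace(...) snippet above lazily creates one lock object per namespace via Map.computeIfAbsent. A standalone sketch of that pattern (JDK only; the namespace name is made up):

import java.util.HashMap;
import java.util.Map;

public class NamespaceLockExample {
  // One lock object per namespace, created on first use and reused afterwards,
  // mirroring FileArchiverNotifierImpl.getLockForNamespace above.
  private static final Map<String, Object> NAMESPACE_LOCKS = new HashMap<>();

  static synchronized Object getLockForNamespace(String namespace) {
    return NAMESPACE_LOCKS.computeIfAbsent(namespace, ns -> new Object());
  }

  public static void main(String[] args) {
    Object lock = getLockForNamespace("default");
    synchronized (lock) {
      // Serialize per-namespace work, e.g. writing snapshot-size entries
      // for that namespace's tables to the quota table.
      System.out.println("holding lock for namespace 'default'");
    }
  }
}
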

[08/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
index 7d1dba6..11f9915 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
@@ -50,114 +50,114 @@
 042import 
java.util.concurrent.atomic.AtomicBoolean;
 043import 
java.util.concurrent.atomic.AtomicLong;
 044import 
java.util.concurrent.atomic.LongAdder;
-045import 
org.apache.commons.collections.CollectionUtils;
-046import 
org.apache.commons.lang3.mutable.MutableObject;
-047import 
org.apache.hadoop.conf.Configuration;
-048import org.apache.hadoop.fs.Path;
-049import 
org.apache.hadoop.hbase.ByteBufferExtendedCell;
-050import 
org.apache.hadoop.hbase.CacheEvictionStats;
-051import 
org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
-052import org.apache.hadoop.hbase.Cell;
-053import 
org.apache.hadoop.hbase.CellScannable;
-054import 
org.apache.hadoop.hbase.CellScanner;
-055import 
org.apache.hadoop.hbase.CellUtil;
-056import 
org.apache.hadoop.hbase.CompareOperator;
-057import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-058import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-059import 
org.apache.hadoop.hbase.HBaseIOException;
-060import 
org.apache.hadoop.hbase.HConstants;
-061import 
org.apache.hadoop.hbase.MultiActionResultTooLarge;
-062import 
org.apache.hadoop.hbase.NotServingRegionException;
-063import 
org.apache.hadoop.hbase.PrivateCellUtil;
-064import 
org.apache.hadoop.hbase.RegionTooBusyException;
-065import org.apache.hadoop.hbase.Server;
-066import 
org.apache.hadoop.hbase.ServerName;
-067import 
org.apache.hadoop.hbase.TableName;
-068import 
org.apache.hadoop.hbase.UnknownScannerException;
-069import 
org.apache.hadoop.hbase.client.Append;
-070import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-071import 
org.apache.hadoop.hbase.client.Delete;
-072import 
org.apache.hadoop.hbase.client.Durability;
-073import 
org.apache.hadoop.hbase.client.Get;
-074import 
org.apache.hadoop.hbase.client.Increment;
-075import 
org.apache.hadoop.hbase.client.Mutation;
-076import 
org.apache.hadoop.hbase.client.Put;
-077import 
org.apache.hadoop.hbase.client.RegionInfo;
-078import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-079import 
org.apache.hadoop.hbase.client.Result;
-080import 
org.apache.hadoop.hbase.client.Row;
-081import 
org.apache.hadoop.hbase.client.RowMutations;
-082import 
org.apache.hadoop.hbase.client.Scan;
-083import 
org.apache.hadoop.hbase.client.TableDescriptor;
-084import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-085import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-086import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-087import 
org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-088import 
org.apache.hadoop.hbase.exceptions.ScannerResetException;
-089import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-090import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-091import 
org.apache.hadoop.hbase.io.TimeRange;
-092import 
org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
-093import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-094import 
org.apache.hadoop.hbase.ipc.PriorityFunction;
-095import 
org.apache.hadoop.hbase.ipc.QosPriority;
-096import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-097import 
org.apache.hadoop.hbase.ipc.RpcCallback;
-098import 
org.apache.hadoop.hbase.ipc.RpcServer;
-099import 
org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-100import 
org.apache.hadoop.hbase.ipc.RpcServerFactory;
-101import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-102import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-103import 
org.apache.hadoop.hbase.ipc.ServerRpcController;
-104import 
org.apache.hadoop.hbase.log.HBaseMarkers;
-105import 
org.apache.hadoop.hbase.master.MasterRpcServices;
-106import 
org.apache.hadoop.hbase.net.Address;
-107import 
org.apache.hadoop.hbase.procedure2.RSProcedureCallable;
-108import 
org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;
-109import 
org.apache.hadoop.hbase.quotas.OperationQuota;
-110import 
org.apache.hadoop.hbase.quotas.QuotaUtil;
-111import 
org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
-112import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-113import 
org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;
-114import 
org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
-115import 

[17/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html
index e080cd6..73a1036 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.StoreFileReference.html
@@ -42,604 +42,605 @@
 034import java.util.function.Predicate;
 035import java.util.stream.Collectors;
 036
-037import 
org.apache.commons.lang.builder.HashCodeBuilder;
-038import org.apache.commons.logging.Log;
-039import 
org.apache.commons.logging.LogFactory;
-040import 
org.apache.hadoop.conf.Configuration;
-041import org.apache.hadoop.fs.FileStatus;
-042import org.apache.hadoop.fs.FileSystem;
-043import org.apache.hadoop.fs.Path;
-044import 
org.apache.hadoop.hbase.TableName;
-045import 
org.apache.hadoop.hbase.client.Connection;
-046import 
org.apache.hadoop.hbase.client.Get;
-047import 
org.apache.hadoop.hbase.client.Put;
-048import 
org.apache.hadoop.hbase.client.Result;
-049import 
org.apache.hadoop.hbase.client.Table;
-050import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-051import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest;
-052import 
org.apache.hadoop.hbase.util.FSUtils;
-053import 
org.apache.hadoop.hbase.util.HFileArchiveUtil;
-054import 
org.apache.hadoop.util.StringUtils;
-055import 
org.apache.yetus.audience.InterfaceAudience;
-056
-057import 
org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap;
-058import 
org.apache.hbase.thirdparty.com.google.common.collect.Multimap;
-059import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-060
-061import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-062import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile;
-066
-067/**
-068 * Tracks file archiving and updates the 
hbase quota table.
-069 */
-070@InterfaceAudience.Private
-071public class FileArchiverNotifierImpl 
implements FileArchiverNotifier {
-072  private static final Log LOG = 
LogFactory.getLog(FileArchiverNotifierImpl.class);
-073  private final Connection conn;
-074  private final Configuration conf;
-075  private final FileSystem fs;
-076  private final TableName tn;
-077  private final ReadLock readLock;
-078  private final WriteLock writeLock;
-079  private volatile long lastFullCompute = 
Long.MIN_VALUE;
-080  private List<String> currentSnapshots = Collections.emptyList();
-081  private static final Map<String,Object> NAMESPACE_LOCKS = new HashMap<>();
-082
-083  /**
-084   * An Exception thrown when 
SnapshotSize updates to hbase:quota fail to be written.
-085   */
-086  @InterfaceAudience.Private
-087  public static class 
QuotaSnapshotSizeSerializationException extends IOException {
-088private static final long 
serialVersionUID = 1L;
-089
-090public 
QuotaSnapshotSizeSerializationException(String msg) {
-091  super(msg);
-092}
-093  }
-094
-095  public FileArchiverNotifierImpl(
-096  Connection conn, Configuration 
conf, FileSystem fs, TableName tn) {
-097this.conn = conn;
-098this.conf = conf;
-099this.fs = fs;
-100this.tn = tn;
-101ReentrantReadWriteLock lock = new 
ReentrantReadWriteLock();
-102readLock = lock.readLock();
-103writeLock = lock.writeLock();
-104  }
-105
-106  static synchronized Object 
getLockForNamespace(String namespace) {
-107    return NAMESPACE_LOCKS.computeIfAbsent(namespace, (ns) -> new Object());
-108  }
-109
-110  /**
-111   * Returns a strictly-increasing 
measure of time extracted by {@link System#nanoTime()}.
-112   */
-113  long getLastFullCompute() {
-114return lastFullCompute;
-115  }
-116
-117  @Override
-118  public void addArchivedFiles(Set<Entry<String, Long>> fileSizes) throws IOException {
-119long start = System.nanoTime();
-120readLock.lock();
-121try {
-122  // We want to catch the case where 
we got an archival request, but there was a full
-123  // re-computation in progress that 
was blocking us. Most likely, the full computation is going
-124  // to already include the changes 
we were going to make.
-125  //
-126  // Same as "start  
lastFullCompute" but avoiding numeric overflow per the
-127  // System.nanoTime() javadoc
-128 
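The getLockForNamespace() method shown above hands out exactly one lock object per namespace through Map.computeIfAbsent. A small sketch of that pattern in isolation; it uses a ConcurrentHashMap instead of the synchronized static accessor in the diff, and the namespace name is illustrative:

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class NamespaceLocks {
      // One lock object per namespace; computeIfAbsent creates it on first use.
      private static final Map<String, Object> LOCKS = new ConcurrentHashMap<>();

      static Object lockFor(String namespace) {
        return LOCKS.computeIfAbsent(namespace, ns -> new Object());
      }

      public static void main(String[] args) {
        Object a = lockFor("default");
        Object b = lockFor("default");
        System.out.println(a == b); // true: same lock instance for the same namespace
        synchronized (lockFor("default")) {
          // updates for tables in the "default" namespace would be serialized here
        }
      }
    }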

[25/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/apidocs/org/apache/hadoop/hbase/client/RowMutations.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/RowMutations.html 
b/apidocs/org/apache/hadoop/hbase/client/RowMutations.html
index 7142b04..46db553 100644
--- a/apidocs/org/apache/hadoop/hbase/client/RowMutations.html
+++ b/apidocs/org/apache/hadoop/hbase/client/RowMutations.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class RowMutations
+public class RowMutations
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements Row
 Performs multiple mutations atomically on a single row.
@@ -284,7 +284,7 @@ implements 
 
 RowMutations
-public RowMutations(byte[] row)
+public RowMutations(byte[] row)
 
 
 
@@ -293,7 +293,7 @@ implements 
 
 RowMutations
-public RowMutations(byte[] row,
+public RowMutations(byte[] row,
 int initialCapacity)
 Create an atomic mutation for the specified row.
 
@@ -317,7 +317,7 @@ implements 
 
 of
-public static RowMutations of(List<? extends Mutation> mutations)
+public static RowMutations of(List<? extends Mutation> mutations)
 throws IOException
 Create a RowMutations 
with the specified mutations.
 
@@ -337,7 +337,7 @@ implements 
 add
 https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-publicvoidadd(Putp)
+publicvoidadd(Putp)
  throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Deprecated.since 2.0 version and will be removed in 3.0 version.
  use add(Mutation)
@@ -357,7 +357,7 @@ publicvoid
 add
 https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-publicvoidadd(Deleted)
+publicvoidadd(Deleted)
  throws https://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Deprecated.since 2.0 version and will be removed in 3.0 version.
  use add(Mutation)
@@ -376,7 +376,7 @@ publicvoid
 
 add
-public RowMutations add(Mutation mutation)
+public RowMutations add(Mutation mutation)
  throws IOException
 Currently only supports Put and Delete mutations.
 
@@ -393,7 +393,7 @@ publicvoid
 
 add
-public RowMutations add(List<? extends Mutation> mutations)
+public RowMutations add(List<? extends Mutation> mutations)
  throws IOException
 Currently only supports Put and Delete mutations.
 
@@ -411,7 +411,7 @@ publicvoid
 compareTo
 https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-publicintcompareTo(Rowi)
+publicintcompareTo(Rowi)
 Deprecated.As of release 2.0.0, this will be removed in HBase 
3.0.0.
  Use Row.COMPARATOR
 instead
 
@@ -429,7 +429,7 @@ publicint
 equals
 https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-publicbooleanequals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
+publicbooleanequals(https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Objectobj)
 Deprecated.As of release 2.0.0, this will be removed in HBase 
3.0.0.
  No replacement
 
@@ -445,7 +445,7 @@ publicboolean
 hashCode
 https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html?is-external=true;
 title="class or interface in java.lang">@Deprecated
-publicinthashCode()
+publicinthashCode()
 Deprecated.As of release 2.0.0, this will be removed in HBase 
3.0.0.
  No replacement
 
@@ -460,7 +460,7 @@ publicint
 
 getRow
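The RowMutations javadoc in the diff above ("performs multiple mutations atomically on a single row", "currently only supports Put and Delete mutations") boils down to the following usage pattern. A hedged sketch: the Table handle, column family "cf" and qualifiers are illustrative, not taken from this diff:

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.RowMutations;
    import org.apache.hadoop.hbase.client.Table;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RowMutationsExample {
      // Applies a Put and a Delete to the same row as one atomic unit.
      static void updateRow(Table table, byte[] row) throws IOException {
        RowMutations mutations = new RowMutations(row);
        mutations.add(new Put(row)
            .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("new-q"), Bytes.toBytes("value")));
        mutations.add(new Delete(row)
            .addColumns(Bytes.toBytes("cf"), Bytes.toBytes("old-q")));
        table.mutateRow(mutations); // both mutations succeed or fail together
      }
    }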

[24/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 86842ab..67a62c1 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -18536,7 +18536,7 @@
 imports
 UnusedImports
 Unused import - org.apache.hadoop.conf.Configuration.
-27
+25
 
 org/apache/hadoop/hbase/TestFSTableDescriptorForceCreation.java
 
@@ -27464,7 +27464,7 @@
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-146
+147
 
 org/apache/hadoop/hbase/client/RpcRetryingCaller.java
 
@@ -86249,7 +86249,7 @@
 imports
 ImportOrder
 Wrong order for 'org.apache.hadoop.hbase.KeyValue' import.
-37
+36
 
 Error
 indentation
@@ -123417,7 +123417,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-04-19
+  Last Published: 
2018-04-20
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/coc.html
--
diff --git a/coc.html b/coc.html
index c11a645..0144c87 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Code of Conduct Policy
@@ -375,7 +375,7 @@ email to mailto:priv...@hbase.apache.org;>the priv
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-04-19
+  Last Published: 
2018-04-20
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index 917c2c5..461e974 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependencies
 
@@ -440,7 +440,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-04-19
+  Last Published: 
2018-04-20
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 4af4e18..76564c8 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Reactor Dependency Convergence
 
@@ -1105,7 +1105,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-04-19
+  Last Published: 
2018-04-20
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index 0c1d73a..6d9a8c7 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Dependency Information
 
@@ -313,7 +313,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-04-19
+  Last Published: 
2018-04-20
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index 7806822..fa99e3e 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependency Management
 
@@ -969,7 +969,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-04-19
+  Last Published: 
2018-04-20
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/constant-values.html
--
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index d4d265b..60dc3eb 100644
--- a/devapidocs/constant-values.html
+++ 

[01/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 12c47ed27 -> 1facf1d3a


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html
index 6cb193a..1ce94ea 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/wal/TestDisabledWAL.html
@@ -30,92 +30,93 @@
 022
 023import java.io.IOException;
 024
-025import org.apache.commons.logging.Log;
-026import 
org.apache.commons.logging.LogFactory;
-027import 
org.apache.hadoop.conf.Configuration;
-028import 
org.apache.hadoop.hbase.HBaseClassTestRule;
-029import 
org.apache.hadoop.hbase.HBaseTestingUtility;
-030import 
org.apache.hadoop.hbase.TableName;
-031import 
org.apache.hadoop.hbase.client.Get;
-032import 
org.apache.hadoop.hbase.client.Put;
-033import 
org.apache.hadoop.hbase.client.Result;
-034import 
org.apache.hadoop.hbase.client.Table;
-035import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-036import 
org.apache.hadoop.hbase.testclassification.RegionServerTests;
-037import 
org.apache.hadoop.hbase.util.Bytes;
-038import org.junit.After;
-039import org.junit.AfterClass;
-040import org.junit.Before;
-041import org.junit.BeforeClass;
-042import org.junit.ClassRule;
-043import org.junit.Rule;
-044import org.junit.Test;
-045import 
org.junit.experimental.categories.Category;
-046import org.junit.rules.TestName;
-047
-048@Category({ RegionServerTests.class, 
MediumTests.class })
-049public class TestDisabledWAL {
-050
-051  @ClassRule
-052  public static final HBaseClassTestRule 
CLASS_RULE =
-053  
HBaseClassTestRule.forClass(TestDisabledWAL.class);
-054
-055  @Rule
-056  public TestName name = new 
TestName();
-057
-058  private static final Log LOG = 
LogFactory.getLog(TestDisabledWAL.class);
-059  static final HBaseTestingUtility 
TEST_UTIL = new HBaseTestingUtility();
-060  private Table table;
-061  private TableName tableName;
-062  private byte[] fam = 
Bytes.toBytes("f1");
-063
-064  @BeforeClass
-065  public static void beforeClass() throws 
Exception {
-066Configuration conf = 
TEST_UTIL.getConfiguration();
-067
conf.setBoolean("hbase.regionserver.hlog.enabled", false);
-068try {
-069  TEST_UTIL.startMiniCluster();
-070} catch (RuntimeException | 
IOException e) {
-071  LOG.error("Master failed to 
start.", e);
-072  fail("Failed to start cluster. 
Reason being: " + e.getCause().getMessage());
-073}
-074  }
-075
-076  @AfterClass
-077  public static void afterClass() throws 
Exception {
-078TEST_UTIL.shutdownMiniCluster();
-079  }
-080
-081  @Before
-082  public void setup() throws Exception 
{
-083tableName = 
TableName.valueOf(name.getMethodName().replaceAll("[^a-zA-Z0-9]", "_"));
-084LOG.info("Creating table " + 
tableName);
-085table = 
TEST_UTIL.createTable(tableName, fam);
-086  }
-087
-088  @After
-089  public void cleanup() throws Exception 
{
-090LOG.info("Deleting table " + 
tableName);
-091TEST_UTIL.deleteTable(tableName);
-092  }
-093
-094  @Test
-095  public void testDisabledWAL() throws 
Exception {
-096LOG.info("Writing data to table " + 
tableName);
-097Put p = new 
Put(Bytes.toBytes("row"));
-098p.addColumn(fam, 
Bytes.toBytes("qual"), Bytes.toBytes("val"));
-099table.put(p);
-100
-101LOG.info("Flushing table " + 
tableName);
-102TEST_UTIL.flush(tableName);
-103
-104LOG.info("Getting data from table " + 
tableName);
-105Get get = new 
Get(Bytes.toBytes("row"));
-106
-107Result result = table.get(get);
-108assertNotNull(result.getValue(fam, 
Bytes.toBytes("qual")));
-109  }
-110}
+025import 
org.apache.hadoop.conf.Configuration;
+026import 
org.apache.hadoop.hbase.HBaseClassTestRule;
+027import 
org.apache.hadoop.hbase.HBaseTestingUtility;
+028import 
org.apache.hadoop.hbase.TableName;
+029import 
org.apache.hadoop.hbase.client.Get;
+030import 
org.apache.hadoop.hbase.client.Put;
+031import 
org.apache.hadoop.hbase.client.Result;
+032import 
org.apache.hadoop.hbase.client.Table;
+033import 
org.apache.hadoop.hbase.testclassification.MediumTests;
+034import 
org.apache.hadoop.hbase.testclassification.RegionServerTests;
+035import 
org.apache.hadoop.hbase.util.Bytes;
+036import org.junit.After;
+037import org.junit.AfterClass;
+038import org.junit.Before;
+039import org.junit.BeforeClass;
+040import org.junit.ClassRule;
+041import org.junit.Rule;
+042import org.junit.Test;
+043import 
org.junit.experimental.categories.Category;
+044import org.junit.rules.TestName;
+045
+046import org.slf4j.Logger;
+047import org.slf4j.LoggerFactory;
+048
+049@Category({ RegionServerTests.class, 
MediumTests.class })
+050public class 

[26/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 14ebd1a..7ac23b6 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,16 +5,16 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20180419144425+00'00')
-/CreationDate (D:20180419144425+00'00')
+/ModDate (D:20180420144439+00'00')
+/CreationDate (D:20180420144439+00'00')
 >>
 endobj
 2 0 obj
 << /Type /Catalog
 /Pages 3 0 R
 /Names 26 0 R
-/Outlines 4616 0 R
-/PageLabels 4842 0 R
+/Outlines 4634 0 R
+/PageLabels 4860 0 R
 /PageMode /UseOutlines
 /OpenAction [7 0 R /FitH 842.89]
 /ViewerPreferences << /DisplayDocTitle true
@@ -23,8 +23,8 @@ endobj
 endobj
 3 0 obj
 << /Type /Pages
-/Count 721
-/Kids [7 0 R 12 0 R 14 0 R 16 0 R 18 0 R 20 0 R 22 0 R 24 0 R 44 0 R 47 0 R 50 
0 R 54 0 R 61 0 R 63 0 R 67 0 R 69 0 R 71 0 R 78 0 R 81 0 R 83 0 R 89 0 R 92 0 
R 94 0 R 96 0 R 103 0 R 110 0 R 115 0 R 117 0 R 133 0 R 138 0 R 146 0 R 155 0 R 
163 0 R 172 0 R 183 0 R 187 0 R 189 0 R 193 0 R 202 0 R 211 0 R 219 0 R 228 0 R 
233 0 R 242 0 R 250 0 R 259 0 R 272 0 R 279 0 R 289 0 R 297 0 R 305 0 R 312 0 R 
320 0 R 327 0 R 333 0 R 340 0 R 348 0 R 357 0 R 366 0 R 380 0 R 387 0 R 395 0 R 
402 0 R 410 0 R 419 0 R 429 0 R 437 0 R 444 0 R 453 0 R 465 0 R 475 0 R 482 0 R 
489 0 R 497 0 R 506 0 R 514 0 R 519 0 R 523 0 R 528 0 R 532 0 R 548 0 R 559 0 R 
563 0 R 578 0 R 583 0 R 588 0 R 590 0 R 592 0 R 595 0 R 597 0 R 599 0 R 607 0 R 
613 0 R 618 0 R 623 0 R 630 0 R 640 0 R 648 0 R 652 0 R 656 0 R 658 0 R 668 0 R 
682 0 R 691 0 R 700 0 R 710 0 R 721 0 R 732 0 R 751 0 R 757 0 R 760 0 R 766 0 R 
769 0 R 773 0 R 777 0 R 780 0 R 783 0 R 785 0 R 788 0 R 792 0 R 794 0 R 798 0 R 
804 0 R 809 0 R 813 0 R 816 0 R 822 0 R
  824 0 R 828 0 R 836 0 R 838 0 R 841 0 R 844 0 R 847 0 R 850 0 R 864 0 R 872 0 
R 883 0 R 894 0 R 900 0 R 910 0 R 921 0 R 924 0 R 928 0 R 931 0 R 936 0 R 945 0 
R 953 0 R 957 0 R 961 0 R 966 0 R 970 0 R 972 0 R 988 0 R 999 0 R 1004 0 R 1011 
0 R 1014 0 R 1022 0 R 1030 0 R 1035 0 R 1040 0 R 1045 0 R 1047 0 R 1049 0 R 
1051 0 R 1061 0 R 1069 0 R 1073 0 R 1080 0 R 1087 0 R 1095 0 R 1099 0 R 1105 0 
R 1110 0 R 1118 0 R 1122 0 R 1127 0 R 1129 0 R 1135 0 R 1143 0 R 1149 0 R 1156 
0 R 1167 0 R 1171 0 R 1173 0 R 1175 0 R 1179 0 R 1182 0 R 1187 0 R 1190 0 R 
1202 0 R 1206 0 R 1212 0 R 1220 0 R 1225 0 R 1229 0 R 1233 0 R 1235 0 R 1238 0 
R 1241 0 R 1244 0 R 1248 0 R 1252 0 R 1256 0 R 1261 0 R 1265 0 R 1268 0 R 1270 
0 R 1280 0 R 1283 0 R 1291 0 R 1300 0 R 1306 0 R 1310 0 R 1312 0 R 1323 0 R 
1326 0 R 1332 0 R 1340 0 R 1343 0 R 1350 0 R 1358 0 R 1360 0 R 1362 0 R 1371 0 
R 1373 0 R 1375 0 R 1378 0 R 1380 0 R 1382 0 R 1384 0 R 1386 0 R 1389 0 R 1393 
0 R 1398 0 R 1400 0 R 1402 0 R 1404 0 R 1409 0 R 1416 0 
 R 1422 0 R 1425 0 R 1427 0 R 1430 0 R 1434 0 R 1438 0 R 1441 0 R 1443 0 R 1445 
0 R 1448 0 R 1453 0 R 1459 0 R 1467 0 R 1481 0 R 1495 0 R 1498 0 R 1503 0 R 
1516 0 R 1521 0 R 1536 0 R 1544 0 R 1548 0 R 1557 0 R 1572 0 R 1586 0 R 1598 0 
R 1603 0 R 1609 0 R 1619 0 R 1624 0 R 1629 0 R 1637 0 R 1640 0 R 1649 0 R 1655 
0 R 1659 0 R 1671 0 R 1676 0 R 1682 0 R 1684 0 R 1691 0 R 1699 0 R 1707 0 R 
1711 0 R 1713 0 R 1715 0 R 1727 0 R 1733 0 R 1742 0 R 1748 0 R 1761 0 R 1767 0 
R 1773 0 R 1784 0 R 1790 0 R 1795 0 R 1800 0 R 1803 0 R 1806 0 R 1811 0 R 1816 
0 R 1823 0 R 1827 0 R 1832 0 R 1841 0 R 1846 0 R 1851 0 R 1853 0 R 1862 0 R 
1869 0 R 1875 0 R 1880 0 R 1884 0 R 1888 0 R 1893 0 R 1898 0 R 1904 0 R 1906 0 
R 1908 0 R 1911 0 R 1922 0 R 1925 0 R 1932 0 R 1940 0 R 1945 0 R 1949 0 R 1954 
0 R 1956 0 R 1959 0 R 1964 0 R 1967 0 R 1969 0 R 1972 0 R 1975 0 R 1978 0 R 
1988 0 R 1993 0 R 1998 0 R 2000 0 R 2008 0 R 2015 0 R 2022 0 R 2028 0 R 2033 0 
R 2035 0 R 2044 0 R 2054 0 R 2064 0 R 2070 0 R 2077 0 R 2079 
 0 R 2084 0 R 2086 0 R 2088 0 R 2092 0 R 2095 0 R 2098 0 R 2103 0 R 2107 0 R 
2118 0 R 2121 0 R 2126 0 R 2129 0 R 2131 0 R 2136 0 R 2146 0 R 2148 0 R 2150 0 
R 2152 0 R 2154 0 R 2157 0 R 2159 0 R 2161 0 R 2164 0 R 2166 0 R 2168 0 R 2172 
0 R 2177 0 R 2186 0 R 2188 0 R 2190 0 R 2196 0 R 2198 0 R 2203 0 R 2205 0 R 
2207 0 R 2214 0 R 2219 0 R 2223 0 R 2228 0 R 2232 0 R 2234 0 R 2236 0 R 2240 0 
R 2243 0 R 2245 0 R 2247 0 R 2251 0 R 2253 0 R 2256 0 R 2258 0 R 2260 0 R 2262 
0 R 2269 0 R 2272 0 R 2277 0 R 2279 0 R 2281 0 R 2283 0 R 2285 0 R 2293 0 R 
2304 0 R 2318 0 R 2329 0 R 2333 0 R 2338 0 R 2342 0 R 2345 0 R 2350 0 R 2356 0 
R 2358 0 R 2361 0 R 2363 0 R 2365 0 R 2367 0 R 2372 0 R 2374 0 R 2387 0 R 2390 
0 R 2398 0 R 2404 0 R 2416 0 R 2430 0 R 2443 0 R 2462 0 R 2464 0 R 2466 0 R 
2470 0 R 2488 0 R 2494 0 R 2506 0 R 2510 0 R 2514 0 R 2523 0 R 2535 0 R 2540 0 
R 2550 0 R 2563 0 R 2582 0 R 2591 0 R 2594 0 R 2603 0 R 2620 0 R 2627 0 R 2630 
0 R 2635 0 R 2639 0 R 2642 0 R 2651 

[04/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/org/apache/hadoop/hbase/client/TestFlushFromClient.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/client/TestFlushFromClient.html 
b/testdevapidocs/org/apache/hadoop/hbase/client/TestFlushFromClient.html
index 4228156..1fe64a5 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/TestFlushFromClient.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/TestFlushFromClient.html
@@ -109,7 +109,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestFlushFromClient
+public class TestFlushFromClient
 extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 
 
@@ -142,7 +142,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 FAMILY
 
 
-private static 
org.apache.commons.logging.Log
+private static org.slf4j.Logger
 LOG
 
 
@@ -273,7 +273,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 CLASS_RULE
-public static finalHBaseClassTestRule CLASS_RULE
+public static finalHBaseClassTestRule CLASS_RULE
 
 
 
@@ -282,7 +282,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -291,7 +291,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 TEST_UTIL
-private static finalHBaseTestingUtility TEST_UTIL
+private static finalHBaseTestingUtility TEST_UTIL
 
 
 
@@ -300,7 +300,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 asyncConn
-private staticorg.apache.hadoop.hbase.client.AsyncConnection asyncConn
+private staticorg.apache.hadoop.hbase.client.AsyncConnection asyncConn
 
 
 
@@ -309,7 +309,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 SPLITS
-private static finalbyte[][] SPLITS
+private static finalbyte[][] SPLITS
 
 
 
@@ -318,7 +318,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 ROWS
-private static final List<byte[]> ROWS
+private static final List<byte[]> ROWS
 
 
 
@@ -327,7 +327,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 FAMILY
-private static finalbyte[] FAMILY
+private static finalbyte[] FAMILY
 
 
 
@@ -336,7 +336,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 name
-publicorg.junit.rules.TestName name
+publicorg.junit.rules.TestName name
 
 
 
@@ -345,7 +345,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 tableName
-publicorg.apache.hadoop.hbase.TableName tableName
+publicorg.apache.hadoop.hbase.TableName tableName
 
 
 
@@ -362,7 +362,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 TestFlushFromClient
-publicTestFlushFromClient()
+publicTestFlushFromClient()
 
 
 
@@ -379,7 +379,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 setUpBeforeClass
-public staticvoidsetUpBeforeClass()
+public staticvoidsetUpBeforeClass()
  throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -393,7 +393,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 tearDownAfterClass
-public staticvoidtearDownAfterClass()
+public staticvoidtearDownAfterClass()
throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -407,7 +407,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 setUp
-publicvoidsetUp()
+publicvoidsetUp()
throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -421,7 +421,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 tearDown
-publicvoidtearDown()
+publicvoidtearDown()
   throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -435,7 +435,7 @@ extends https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html
 
 
 testFlushTable
-publicvoidtestFlushTable()
+publicvoidtestFlushTable()
 throws https://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
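TestFlushFromClient above exercises flushes triggered from the client side, both through the blocking Admin API and through the AsyncConnection held in asyncConn. A hedged sketch of the two call paths; the connection and table-name parameters are illustrative:

    import java.io.IOException;
    import java.util.concurrent.ExecutionException;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.AsyncConnection;
    import org.apache.hadoop.hbase.client.Connection;

    public class FlushFromClient {
      // Blocking flush of every region of the table.
      static void flushSync(Connection conn, TableName tn) throws IOException {
        try (Admin admin = conn.getAdmin()) {
          admin.flush(tn);
        }
      }

      // Non-blocking flush through the async admin; get() just waits for completion here.
      static void flushAsync(AsyncConnection asyncConn, TableName tn)
          throws ExecutionException, InterruptedException {
        asyncConn.getAdmin().flush(tn).get();
      }
    }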

[21/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
index 0ef4c76..1c79f3b 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.html
@@ -32,123 +32,124 @@
 024import java.util.List;
 025import java.util.Map;
 026
-027import 
org.apache.commons.collections.MapUtils;
-028import 
org.apache.hadoop.conf.Configuration;
-029import org.apache.hadoop.fs.FileStatus;
-030import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-031import 
org.apache.hadoop.hbase.TableNotFoundException;
-032import 
org.apache.hadoop.hbase.backup.BackupRestoreConstants;
-033import 
org.apache.hadoop.hbase.backup.impl.BackupManager;
-034import 
org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
-035import 
org.apache.hadoop.hbase.client.Connection;
-036import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-037import 
org.apache.hadoop.hbase.master.HMaster;
-038import 
org.apache.hadoop.hbase.master.MasterServices;
-039import 
org.apache.hadoop.hbase.master.cleaner.BaseLogCleanerDelegate;
-040import 
org.apache.yetus.audience.InterfaceAudience;
-041
-042import org.slf4j.Logger;
-043import org.slf4j.LoggerFactory;
-044
-045/**
-046 * Implementation of a log cleaner that 
checks if a log is still scheduled for incremental backup
-047 * before deleting it when its TTL is 
over.
-048 */
-049@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
-050public class BackupLogCleaner extends 
BaseLogCleanerDelegate {
-051  private static final Logger LOG = 
LoggerFactory.getLogger(BackupLogCleaner.class);
-052
-053  private boolean stopped = false;
-054  private Connection conn;
-055
-056  public BackupLogCleaner() {
-057  }
-058
-059  @Override
-060  public void init(Map<String, Object> params) {
-061MasterServices master = 
(MasterServices) MapUtils.getObject(params,
-062  HMaster.MASTER);
-063if (master != null) {
-064  conn = master.getConnection();
-065  if (getConf() == null) {
-066
super.setConf(conn.getConfiguration());
-067  }
-068}
-069if (conn == null) {
-070  try {
-071conn = 
ConnectionFactory.createConnection(getConf());
-072  } catch (IOException ioe) {
-073throw new 
RuntimeException("Failed to create connection", ioe);
-074  }
-075}
-076  }
-077
-078  @Override
-079  public Iterable<FileStatus> getDeletableFiles(Iterable<FileStatus> files) {
-080// all members of this class are null 
if backup is disabled,
-081// so we cannot filter the files
-082if (this.getConf() == null || 
!BackupManager.isBackupEnabled(getConf())) {
-083  LOG.debug("Backup is not enabled. 
Check your {} setting",
-084  
BackupRestoreConstants.BACKUP_ENABLE_KEY);
-085  return files;
-086}
-087
-088try (final BackupSystemTable table = 
new BackupSystemTable(conn)) {
-089  // If we do not have recorded 
backup sessions
-090  try {
-091if (!table.hasBackupSessions()) 
{
-092  LOG.trace("BackupLogCleaner has 
no backup sessions");
-093  return files;
-094}
-095  } catch (TableNotFoundException 
tnfe) {
-096LOG.warn("Backup system table is 
not available: {}", tnfe.getMessage());
-097return files;
-098  }
-099
-100  List<FileStatus> list = new ArrayList<>();
-101  Map<FileStatus, Boolean> walFilesDeletableMap = table.areWALFilesDeletable(files);
-102  for (Map.Entry<FileStatus, Boolean> entry: walFilesDeletableMap.entrySet()) {
-103FileStatus file = 
entry.getKey();
-104String wal = 
file.getPath().toString();
-105boolean deletable = 
entry.getValue();
-106if (deletable) {
-107  LOG.debug("Found log file in 
backup system table, deleting: {}", wal);
-108  list.add(file);
-109} else {
-110  LOG.debug("Did not find this 
log in backup system table, keeping: {}", wal);
-111}
-112  }
-113  return list;
-114} catch (IOException e) {
-115  LOG.error("Failed to get backup 
system table table, therefore will keep all files", e);
-116  // nothing to delete
-117  return Collections.emptyList();
-118}
-119  }
-120
-121  @Override
-122  public void setConf(Configuration 
config) {
-123// If backup is disabled, keep all 
members null
-124super.setConf(config);
-125if 
(!config.getBoolean(BackupRestoreConstants.BACKUP_ENABLE_KEY,
-126  
BackupRestoreConstants.BACKUP_ENABLE_DEFAULT)) {
-127  LOG.warn("Backup is disabled - 
allowing all wals to be deleted");
-128}
-129  }
-130
-131  @Override
-132  public void 
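BackupLogCleaner above is a BaseLogCleanerDelegate: the master calls getDeletableFiles() and only removes the WALs the delegate returns. For a delegate of this kind to be consulted at all it has to be listed in the master's cleaner plugin configuration. A hedged sketch of that wiring; the key names follow the usual HBase constants and are assumptions, not taken from this diff:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class RegisterBackupLogCleaner {
      static Configuration withBackupCleaner() {
        Configuration conf = HBaseConfiguration.create();
        // The master only invokes delegates listed under this key (comma separated).
        conf.set("hbase.master.logcleaner.plugins",
            "org.apache.hadoop.hbase.master.cleaner.TimeToLiveLogCleaner,"
                + "org.apache.hadoop.hbase.backup.master.BackupLogCleaner");
        // Backup must be enabled, otherwise getDeletableFiles() passes every WAL through.
        conf.setBoolean("hbase.backup.enable", true);
        return conf;
      }
    }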

[10/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
index 7137829..4a879bb 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.WriteState.html
@@ -78,124 +78,124 @@
 070import 
java.util.concurrent.locks.ReadWriteLock;
 071import 
java.util.concurrent.locks.ReentrantReadWriteLock;
 072import java.util.function.Function;
-073import 
org.apache.commons.collections.CollectionUtils;
-074import 
org.apache.hadoop.conf.Configuration;
-075import org.apache.hadoop.fs.FileStatus;
-076import org.apache.hadoop.fs.FileSystem;
-077import 
org.apache.hadoop.fs.LocatedFileStatus;
-078import org.apache.hadoop.fs.Path;
-079import org.apache.hadoop.hbase.Cell;
-080import 
org.apache.hadoop.hbase.CellBuilderType;
-081import 
org.apache.hadoop.hbase.CellComparator;
-082import 
org.apache.hadoop.hbase.CellComparatorImpl;
-083import 
org.apache.hadoop.hbase.CellScanner;
-084import 
org.apache.hadoop.hbase.CellUtil;
-085import 
org.apache.hadoop.hbase.CompareOperator;
-086import 
org.apache.hadoop.hbase.CompoundConfiguration;
-087import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-088import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-089import 
org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
-090import 
org.apache.hadoop.hbase.HConstants;
-091import 
org.apache.hadoop.hbase.HConstants.OperationStatusCode;
-092import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-093import 
org.apache.hadoop.hbase.KeyValue;
-094import 
org.apache.hadoop.hbase.KeyValueUtil;
-095import 
org.apache.hadoop.hbase.NamespaceDescriptor;
-096import 
org.apache.hadoop.hbase.NotServingRegionException;
-097import 
org.apache.hadoop.hbase.PrivateCellUtil;
-098import 
org.apache.hadoop.hbase.RegionTooBusyException;
-099import 
org.apache.hadoop.hbase.TableName;
-100import org.apache.hadoop.hbase.Tag;
-101import org.apache.hadoop.hbase.TagUtil;
-102import 
org.apache.hadoop.hbase.UnknownScannerException;
-103import 
org.apache.hadoop.hbase.client.Append;
-104import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-105import 
org.apache.hadoop.hbase.client.CompactionState;
-106import 
org.apache.hadoop.hbase.client.Delete;
-107import 
org.apache.hadoop.hbase.client.Durability;
-108import 
org.apache.hadoop.hbase.client.Get;
-109import 
org.apache.hadoop.hbase.client.Increment;
-110import 
org.apache.hadoop.hbase.client.IsolationLevel;
-111import 
org.apache.hadoop.hbase.client.Mutation;
-112import 
org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
-113import 
org.apache.hadoop.hbase.client.Put;
-114import 
org.apache.hadoop.hbase.client.RegionInfo;
-115import 
org.apache.hadoop.hbase.client.RegionInfoBuilder;
-116import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-117import 
org.apache.hadoop.hbase.client.Result;
-118import 
org.apache.hadoop.hbase.client.RowMutations;
-119import 
org.apache.hadoop.hbase.client.Scan;
-120import 
org.apache.hadoop.hbase.client.TableDescriptor;
-121import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-122import 
org.apache.hadoop.hbase.conf.ConfigurationManager;
-123import 
org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
-124import 
org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
-125import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
-126import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-127import 
org.apache.hadoop.hbase.exceptions.TimeoutIOException;
-128import 
org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
-129import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-130import 
org.apache.hadoop.hbase.filter.FilterWrapper;
-131import 
org.apache.hadoop.hbase.filter.IncompatibleFilterException;
-132import 
org.apache.hadoop.hbase.io.HFileLink;
-133import 
org.apache.hadoop.hbase.io.HeapSize;
-134import 
org.apache.hadoop.hbase.io.TimeRange;
-135import 
org.apache.hadoop.hbase.io.hfile.HFile;
-136import 
org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
-137import 
org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-138import 
org.apache.hadoop.hbase.ipc.RpcCall;
-139import 
org.apache.hadoop.hbase.ipc.RpcServer;
-140import 
org.apache.hadoop.hbase.monitoring.MonitoredTask;
-141import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-142import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-143import 
org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
-144import 
org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
-145import 

[07/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html
index 09e41f8..e103c0d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.html
@@ -29,179 +29,182 @@
 021import java.util.Map;
 022import 
java.util.concurrent.ConcurrentSkipListMap;
 023import 
java.util.concurrent.atomic.AtomicInteger;
-024import org.apache.commons.logging.Log;
-025import 
org.apache.commons.logging.LogFactory;
-026import 
org.apache.hadoop.conf.Configuration;
-027import org.apache.hadoop.hbase.Cell;
-028import 
org.apache.hadoop.hbase.RegionTooBusyException;
-029import 
org.apache.hadoop.hbase.regionserver.Region;
-030import 
org.apache.hadoop.hbase.regionserver.Store;
-031import 
org.apache.hadoop.hbase.util.Bytes;
-032import 
org.apache.hadoop.hbase.util.ClassSize;
-033import 
org.apache.yetus.audience.InterfaceAudience;
-034import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-035/**
-036 * StoreHotnessProtector is designed to 
help limit the concurrency of puts with dense columns, it
-037 * does best-effort to avoid exhausting 
all RS's handlers. When a lot of clients write requests with
-038 * dense (hundreds) columns to a Store at 
the same time, it will lead to blocking of RS because CSLM
-039 * degrades when concurrency goes up. 
It's not a kind of throttling. Throttling is user-oriented,
-040 * while StoreHotnessProtector is 
system-oriented, RS-self-protected mechanism.
-041 * p
-042 * There are three key parameters:
-043 * p
-044 * 1. 
parallelPutToStoreThreadLimitCheckMinColumnCount: If the amount of columns 
exceed this
-045 * threshold, the HotProtector will work, 
100 by default
+024import 
org.apache.hadoop.conf.Configuration;
+025import org.apache.hadoop.hbase.Cell;
+026import 
org.apache.hadoop.hbase.RegionTooBusyException;
+027import 
org.apache.hadoop.hbase.regionserver.Region;
+028import 
org.apache.hadoop.hbase.regionserver.Store;
+029import 
org.apache.hadoop.hbase.util.Bytes;
+030import 
org.apache.hadoop.hbase.util.ClassSize;
+031import 
org.apache.yetus.audience.InterfaceAudience;
+032
+033import org.slf4j.Logger;
+034import org.slf4j.LoggerFactory;
+035
+036import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+037
+038/**
+039 * StoreHotnessProtector is designed to 
help limit the concurrency of puts with dense columns, it
+040 * does best-effort to avoid exhausting 
all RS's handlers. When a lot of clients write requests with
+041 * dense (hundreds) columns to a Store at 
the same time, it will lead to blocking of RS because CSLM
+042 * degrades when concurrency goes up. 
It's not a kind of throttling. Throttling is user-oriented,
+043 * while StoreHotnessProtector is 
system-oriented, RS-self-protected mechanism.
+044 * p
+045 * There are three key parameters:
 046 * p
-047 * 2. parallelPutToStoreThreadLimit: The 
amount of concurrency allowed to write puts to a Store at
-048 * the same time.
+047 * 1. 
parallelPutToStoreThreadLimitCheckMinColumnCount: If the amount of columns 
exceed this
+048 * threshold, the HotProtector will work, 
100 by default
 049 * p
-050 * 3. 
parallelPreparePutToStoreThreadLimit: The amount of concurrency allowed to
-051 * prepare writing puts to a Store at the 
same time.
+050 * 2. parallelPutToStoreThreadLimit: The 
amount of concurrency allowed to write puts to a Store at
+051 * the same time.
 052 * p
-053 * Notice that our writing pipeline 
includes three key process: MVCC acquire, writing MemStore, and
-054 * WAL. Only limit the concurrency of 
writing puts to Store(parallelPutToStoreThreadLimit) is not
-055 * enough since the actual concurrency of 
puts may still exceed the limit when MVCC contention or
-056 * slow WAL sync happens. This is why 
parallelPreparePutToStoreThreadLimit is needed.
-057 * p
-058 * This protector is enabled by default 
and could be turned off by setting
-059 * hbase.region.store.parallel.put.limit 
to 0, supporting online configuration change.
-060 */
-061@InterfaceAudience.Private
-062public class StoreHotnessProtector {
-063  private static final Log LOG = 
LogFactory.getLog(StoreHotnessProtector.class);
-064  private volatile int 
parallelPutToStoreThreadLimit;
-065
-066  private volatile int 
parallelPreparePutToStoreThreadLimit;
-067  public final static String 
PARALLEL_PUT_STORE_THREADS_LIMIT =
-068  
"hbase.region.store.parallel.put.limit";
-069  public final static String 
PARALLEL_PREPARE_PUT_STORE_MULTIPLIER =
-070  
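The StoreHotnessProtector javadoc above says the protector is enabled by default and is turned off by setting hbase.region.store.parallel.put.limit to 0, with online configuration change supported. A minimal sketch of toggling it through the Configuration API; only the key shown in the diff is used, the value is illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class StoreHotnessProtectorConfig {
      static Configuration disabled() {
        Configuration conf = HBaseConfiguration.create();
        // Per the class javadoc, 0 turns the protector off; positive values cap the
        // number of concurrent puts allowed into a single store.
        conf.setInt("hbase.region.store.parallel.put.limit", 0);
        return conf;
      }
    }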

[23/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html 
b/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html
index 9c27524..9f8cca5 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public abstract class ClientZKSyncer
+public abstract class ClientZKSyncer
 extends ZKListener
 Tracks the target znode(s) on server ZK cluster and 
synchronize them to client ZK cluster if
  changed
@@ -169,7 +169,7 @@ extends clientZkWatcher
 
 
-private static 
org.apache.commons.logging.Log
+private static org.slf4j.Logger
 LOG
 
 
@@ -312,7 +312,7 @@ extends 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.slf4j.Logger LOG
 
 
 
@@ -321,7 +321,7 @@ extends 
 
 server
-private finalServer server
+private finalServer server
 
 
 
@@ -330,7 +330,7 @@ extends 
 
 clientZkWatcher
-private finalZKWatcher clientZkWatcher
+private finalZKWatcher clientZkWatcher
 
 
 
@@ -339,7 +339,7 @@ extends 
 
 queues
-private final Map<String, BlockingQueue<byte[]>> queues
+private final Map<String, BlockingQueue<byte[]>> queues
 
 
 
@@ -356,7 +356,7 @@ extends 
 
 ClientZKSyncer
-publicClientZKSyncer(ZKWatcherwatcher,
+publicClientZKSyncer(ZKWatcherwatcher,
   ZKWatcherclientZkWatcher,
   Serverserver)
 
@@ -375,7 +375,7 @@ extends 
 
 start
-publicvoidstart()
+publicvoidstart()
throws org.apache.zookeeper.KeeperException
 Starts the syncer
 
@@ -390,7 +390,7 @@ extends 
 
 watchAndCheckExists
-private void watchAndCheckExists(String node)
+private void watchAndCheckExists(String node)
 
 
 
@@ -399,7 +399,7 @@ extends 
 
 upsertQueue
-private void upsertQueue(String node,
+private void upsertQueue(String node,
  byte[] data)
 Update the value of the single element in queue if any, or 
else insert.
  
@@ -417,7 +417,7 @@ extends 
 
 setDataForClientZkUntilSuccess
-private final void setDataForClientZkUntilSuccess(String node,
+private final void setDataForClientZkUntilSuccess(String node,
   byte[] data)
throws InterruptedException
 Set data for client ZK and retry until succeed. Be very 
careful to prevent dead loop when
@@ -437,7 +437,7 @@ extends 
 
 reconnectAfterExpiration
-private finalvoidreconnectAfterExpiration()
+private finalvoidreconnectAfterExpiration()
  throws https://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException
 
 Throws:
@@ -451,7 +451,7 @@ extends 
 
 nodeCreated
-publicvoidnodeCreated(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringpath)
+publicvoidnodeCreated(https://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 

[18/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html
index e080cd6..73a1036 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.SnapshotWithSize.html
@@ -42,604 +42,605 @@
 034import java.util.function.Predicate;
 035import java.util.stream.Collectors;
 036
-037import 
org.apache.commons.lang.builder.HashCodeBuilder;
-038import org.apache.commons.logging.Log;
-039import 
org.apache.commons.logging.LogFactory;
-040import 
org.apache.hadoop.conf.Configuration;
-041import org.apache.hadoop.fs.FileStatus;
-042import org.apache.hadoop.fs.FileSystem;
-043import org.apache.hadoop.fs.Path;
-044import 
org.apache.hadoop.hbase.TableName;
-045import 
org.apache.hadoop.hbase.client.Connection;
-046import 
org.apache.hadoop.hbase.client.Get;
-047import 
org.apache.hadoop.hbase.client.Put;
-048import 
org.apache.hadoop.hbase.client.Result;
-049import 
org.apache.hadoop.hbase.client.Table;
-050import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-051import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest;
-052import 
org.apache.hadoop.hbase.util.FSUtils;
-053import 
org.apache.hadoop.hbase.util.HFileArchiveUtil;
-054import 
org.apache.hadoop.util.StringUtils;
-055import 
org.apache.yetus.audience.InterfaceAudience;
-056
-057import 
org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap;
-058import 
org.apache.hbase.thirdparty.com.google.common.collect.Multimap;
-059import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
-060
-061import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-062import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest;
-064import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.FamilyFiles;
-065import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile;
-066
-067/**
-068 * Tracks file archiving and updates the 
hbase quota table.
-069 */
-070@InterfaceAudience.Private
-071public class FileArchiverNotifierImpl 
implements FileArchiverNotifier {
-072  private static final Log LOG = 
LogFactory.getLog(FileArchiverNotifierImpl.class);
-073  private final Connection conn;
-074  private final Configuration conf;
-075  private final FileSystem fs;
-076  private final TableName tn;
-077  private final ReadLock readLock;
-078  private final WriteLock writeLock;
-079  private volatile long lastFullCompute = 
Long.MIN_VALUE;
-080  private List<String> currentSnapshots = Collections.emptyList();
-081  private static final Map<String,Object> NAMESPACE_LOCKS = new HashMap<>();
-082
-083  /**
-084   * An Exception thrown when 
SnapshotSize updates to hbase:quota fail to be written.
-085   */
-086  @InterfaceAudience.Private
-087  public static class 
QuotaSnapshotSizeSerializationException extends IOException {
-088private static final long 
serialVersionUID = 1L;
-089
-090public 
QuotaSnapshotSizeSerializationException(String msg) {
-091  super(msg);
-092}
-093  }
-094
-095  public FileArchiverNotifierImpl(
-096  Connection conn, Configuration 
conf, FileSystem fs, TableName tn) {
-097this.conn = conn;
-098this.conf = conf;
-099this.fs = fs;
-100this.tn = tn;
-101ReentrantReadWriteLock lock = new 
ReentrantReadWriteLock();
-102readLock = lock.readLock();
-103writeLock = lock.writeLock();
-104  }
-105
-106  static synchronized Object 
getLockForNamespace(String namespace) {
-107    return NAMESPACE_LOCKS.computeIfAbsent(namespace, (ns) -> new Object());
-108  }
-109
-110  /**
-111   * Returns a strictly-increasing 
measure of time extracted by {@link System#nanoTime()}.
-112   */
-113  long getLastFullCompute() {
-114return lastFullCompute;
-115  }
-116
-117  @Override
-118  public void addArchivedFiles(Set<Entry<String, Long>> fileSizes) throws IOException {
-119long start = System.nanoTime();
-120readLock.lock();
-121try {
-122  // We want to catch the case where 
we got an archival request, but there was a full
-123  // re-computation in progress that 
was blocking us. Most likely, the full computation is going
-124  // to already include the changes 
we were going to make.
-125  //
-126  // Same as "start  
lastFullCompute" but avoiding numeric overflow per the
-127  // System.nanoTime() javadoc
-128  if 

[20/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html b/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html
index 3e25d25..87545d3 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.ClientZkUpdater.html
@@ -33,220 +33,221 @@
[diff body trimmed: the removed lines show the previous rendering of ClientZKSyncer, which still
used commons-logging (Log/LogFactory). The class tracks target znodes on the server ZK cluster and
synchronizes their data to a client ZK cluster: each watched znode gets a capacity-1 queue drained
by a daemon ClientZkUpdater thread, so the watcher event thread is never blocked and only the
latest value is pushed.]
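The most reusable idea on that page is the per-znode hand-off: a capacity-1 queue means the daemon
sync thread only ever sees the newest data. A minimal standalone sketch of that pattern, not the
published source itself:

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

/** Sketch of the "latest value wins" hand-off described above; illustration only. */
class LatestValueQueue {
  private final BlockingQueue<byte[]> queue = new ArrayBlockingQueue<>(1);

  /** Called from the watcher thread: replace whatever is pending with the newest data. */
  void upsert(byte[] data) {
    synchronized (queue) {
      queue.poll();       // drop a stale, not-yet-synced value, if any
      queue.offer(data);  // the capacity-1 queue now holds only the latest value
    }
  }

  /** Called from the daemon sync thread: block until there is something to push to client ZK. */
  byte[] take() throws InterruptedException {
    return queue.take();
  }
}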

[12/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
index 7137829..4a879bb 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/HRegion.ObservedExceptionsInBatch.html
@@ -78,124 +78,124 @@
[diff body trimmed: the removed lines cover the import block of the re-rendered HRegion source,
which still pulled in org.apache.commons.collections.CollectionUtils ahead of the Hadoop, HBase
client, filter, and coprocessor imports.]

[09/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
index 7d1dba6..11f9915 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
@@ -50,114 +50,114 @@
[diff body trimmed: the removed lines cover the import block of the re-rendered RSRpcServices
source, which still used org.apache.commons.collections.CollectionUtils and
org.apache.commons.lang3.mutable.MutableObject ahead of the HBase client, ipc, and quota imports.]

[15/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
index c4a43e7..0b83610 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.html
@@ -31,137 +31,138 @@
[diff body trimmed: the removed lines show the previous rendering of RegionSizeReportingChore,
which still used commons-logging (Log/LogFactory). The chore sends region size reports from the
RegionServer to the Master, removes sizes for regions that are no longer online so their reports
can expire, and is tuned by the hbase.regionserver.quotas.region.size.reporting.chore.period /
.delay / .timeunit settings (defaults: 60000 ms period, 30000 ms delay, MILLISECONDS).]

[03/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html
--
diff --git a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html
index af4f61a..1d9d491 100644
--- a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html
+++ b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestFlushFromClient.html
@@ -33,160 +33,161 @@
[diff body trimmed: the removed lines show the previous rendering of TestFlushFromClient, which
still used commons-logging (Log/LogFactory). The test starts a mini cluster, loads a pre-split
table with puts, and verifies that Admin.flush/flushRegion and their AsyncAdmin counterparts drive
the memstore data size of every region back to zero.]

[16/27] hbase-site git commit: Published site at 914de1141699142bce1486468a742233d9440b23.

2018-04-20 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/1facf1d3/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html
index e080cd6..73a1036 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.html
@@ -42,604 +42,605 @@
[diff body trimmed: the removed lines show the previous rendering of FileArchiverNotifierImpl,
which still used commons-lang HashCodeBuilder and commons-logging (Log/LogFactory). The class
tracks file archiving and updates the hbase:quota table, guarding snapshot-size recomputation with
a ReentrantReadWriteLock, per-namespace locks, and a lastFullCompute timestamp taken from
System.nanoTime().]
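One detail worth keeping from the trimmed page: System.nanoTime() readings are only comparable
through subtraction, so the archival check is written overflow-safe. A minimal standalone sketch of
that check, simplified from the source above:

/** Sketch of the overflow-safe nanoTime comparison in FileArchiverNotifierImpl; illustration only. */
class NanoTimeCheck {
  /** Long.MIN_VALUE is the sentinel for "no full computation has run yet". */
  private volatile long lastFullCompute = Long.MIN_VALUE;

  /** True if {@code start} was taken before the last full computation, whose result already covers it. */
  boolean alreadyCoveredByFullCompute(long start) {
    // Same as "start < lastFullCompute", but written as a subtraction so it stays
    // correct across wrap-around, per the System.nanoTime() javadoc.
    return lastFullCompute != Long.MIN_VALUE && start - lastFullCompute < 0;
  }

  void markFullCompute() {
    lastFullCompute = System.nanoTime();
  }
}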

[1/6] hbase git commit: HBASE-20439 Clean up incorrect use of commons-logging in hbase-server

2018-04-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-2 af4dd3eb3 -> 24eb141ba
  refs/heads/branch-2.0 bfada2876 -> 2795e8bcf


HBASE-20439 Clean up incorrect use of commons-logging in hbase-server

Signed-off-by: Umesh Agashe 
Signed-off-by: Yu Li 

 Conflicts:

hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java

hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.java

hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.java
 Additions:

hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java

hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
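The change in each of these files is the same mechanical swap from commons-logging to slf4j that
the hunks below show; a minimal sketch of the before/after (the class name here is hypothetical,
not part of the commit):

// Before (removed by HBASE-20439):
//   import org.apache.commons.logging.Log;
//   import org.apache.commons.logging.LogFactory;
//   private static final Log LOG = LogFactory.getLog(SomeClass.class);

// After: slf4j, as the + lines below add
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SomeClass { // hypothetical example class
  private static final Logger LOG = LoggerFactory.getLogger(SomeClass.class);

  void doWork() {
    // slf4j parameterized logging skips the string building when DEBUG is disabled
    LOG.debug("Starting {}", getClass().getSimpleName());
  }
}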


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/944ecc72
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/944ecc72
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/944ecc72

Branch: refs/heads/branch-2
Commit: 944ecc729c4fb228624d6ca646b689db1678b658
Parents: af4dd3e
Author: Sean Busbey 
Authored: Tue Apr 17 14:40:25 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 08:04:17 2018 -0500

--
 .../hbase/mapreduce/TestCellBasedHFileOutputFormat2.java| 6 +++---
 .../hadoop/hbase/mapreduce/TestCellBasedImportExport2.java  | 6 +++---
 .../apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.java | 7 ---
 .../hbase/regionserver/throttle/StoreHotnessProtector.java  | 9 ++---
 .../org/apache/hadoop/hbase/TestClusterPortAssignment.java  | 7 ---
 .../org/apache/hadoop/hbase/client/TestFlushFromClient.java | 7 ---
 .../hadoop/hbase/client/TestSeparateClientZKCluster.java| 7 ---
 .../hadoop/hbase/procedure/TestFailedProcCleanup.java   | 7 ---
 .../java/org/apache/hadoop/hbase/wal/TestDisabledWAL.java   | 7 ---
 9 files changed, 36 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/944ecc72/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
index 42ef337..9367b81 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
@@ -39,8 +39,6 @@ import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -108,6 +106,8 @@ import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Simple test for {@link HFileOutputFormat2}.
@@ -132,7 +132,7 @@ public class TestCellBasedHFileOutputFormat2  {
 
   private HBaseTestingUtility util = new HBaseTestingUtility();
 
-  private static final Log LOG = 
LogFactory.getLog(TestCellBasedHFileOutputFormat2.class);
+  private static final Logger LOG = 
LoggerFactory.getLogger(TestCellBasedHFileOutputFormat2.class);
 
   /**
* Simple mapper that makes KeyValue output.

http://git-wip-us.apache.org/repos/asf/hbase/blob/944ecc72/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
index 4a22699..5392a74 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
@@ -34,8 +34,6 @@ import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -89,6 +87,8 @@ import org.junit.experimental.categories.Category;
 import 

[5/6] hbase git commit: HBASE-20442 clean up incorrect use of commons-collections 3

2018-04-20 Thread busbey
HBASE-20442 clean up incorrect use of commons-collections 3

Signed-off-by: Umesh Agashe 
Signed-off-by: Yu Li 

 Conflicts:

hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.java

hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java

hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.java

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
 Additional:

hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java
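Across these files the fix is a one-line import swap to the relocated commons-collections4 that
ships in hbase-thirdparty (or to HBase's own CollectionUtils where the hunks below do so). A
minimal sketch of the swap; the class name is hypothetical and compiling it needs the
hbase-thirdparty jar on the classpath:

// Before (commons-collections 3, removed by HBASE-20442):
//   import org.apache.commons.collections.CollectionUtils;

// After: the relocated commons-collections4 from hbase-thirdparty
import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;

import java.util.Collections;
import java.util.List;

public class CollectionsCheck { // hypothetical example class
  static boolean hasMutations(List<String> mutations) {
    // isNotEmpty is null-safe, which is why call sites keep using it instead of !list.isEmpty()
    return CollectionUtils.isNotEmpty(mutations);
  }

  public static void main(String[] args) {
    System.out.println(hasMutations(Collections.emptyList())); // prints false
  }
}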


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2795e8bc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2795e8bc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2795e8bc

Branch: refs/heads/branch-2.0
Commit: 2795e8bcf6f2ac162da1658950bae612d54d27b7
Parents: 80cd8ed
Author: Sean Busbey 
Authored: Tue Apr 17 16:15:11 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 08:45:02 2018 -0500

--
 .../main/java/org/apache/hadoop/hbase/client/RowMutations.java   | 1 +
 .../src/main/java/org/apache/hadoop/hbase/util/Bytes.java| 1 +
 .../java/org/apache/hadoop/hbase/regionserver/StoreScanner.java  | 2 +-
 .../src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java   | 4 ++--
 4 files changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2795e8bc/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
index 1eb3151..31217ad 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.yetus.audience.InterfaceAudience;

http://git-wip-us.apache.org/repos/asf/hbase/blob/2795e8bc/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index b7912fd..6eb09c1 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -49,6 +49,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+import 
org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;
 
 import com.google.protobuf.ByteString;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/2795e8bc/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
index 2bc1e01..1ca1faa 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreScanner.java
@@ -45,13 +45,13 @@ import 
org.apache.hadoop.hbase.regionserver.handler.ParallelSeekHandler;
 import 
org.apache.hadoop.hbase.regionserver.querymatcher.CompactionScanQueryMatcher;
 import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher;
 import org.apache.hadoop.hbase.regionserver.querymatcher.UserScanQueryMatcher;
-import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 
 

[4/6] hbase git commit: HBASE-20439 Clean up incorrect use of commons-logging in hbase-server

2018-04-20 Thread busbey
HBASE-20439 Clean up incorrect use of commons-logging in hbase-server

Signed-off-by: Umesh Agashe 
Signed-off-by: Yu Li 

 Conflicts:

hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java

hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionSizeReportingChore.java

hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.java

hbase-server/src/main/java/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.java

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/throttle/StoreHotnessProtector.java

hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSeparateClientZKCluster.java
 Additions:

hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java

hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b2825a69
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b2825a69
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b2825a69

Branch: refs/heads/branch-2.0
Commit: b2825a691448c2ab3c669ce30cd543271c2e9155
Parents: bfada28
Author: Sean Busbey 
Authored: Tue Apr 17 14:40:25 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 08:44:47 2018 -0500

--
 .../hbase/mapreduce/TestCellBasedHFileOutputFormat2.java  | 6 +++---
 .../hadoop/hbase/mapreduce/TestCellBasedImportExport2.java| 6 +++---
 .../org/apache/hadoop/hbase/TestClusterPortAssignment.java| 7 ---
 .../org/apache/hadoop/hbase/client/TestFlushFromClient.java   | 7 ---
 .../apache/hadoop/hbase/procedure/TestFailedProcCleanup.java  | 7 ---
 .../java/org/apache/hadoop/hbase/wal/TestDisabledWAL.java | 7 ---
 6 files changed, 22 insertions(+), 18 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b2825a69/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
index 42ef337..9367b81 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
@@ -39,8 +39,6 @@ import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -108,6 +106,8 @@ import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Simple test for {@link HFileOutputFormat2}.
@@ -132,7 +132,7 @@ public class TestCellBasedHFileOutputFormat2  {
 
   private HBaseTestingUtility util = new HBaseTestingUtility();
 
-  private static final Log LOG = 
LogFactory.getLog(TestCellBasedHFileOutputFormat2.class);
+  private static final Logger LOG = 
LoggerFactory.getLogger(TestCellBasedHFileOutputFormat2.class);
 
   /**
* Simple mapper that makes KeyValue output.

http://git-wip-us.apache.org/repos/asf/hbase/blob/b2825a69/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
index 4a22699..5392a74 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedImportExport2.java
@@ -34,8 +34,6 @@ import java.net.URL;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -89,6 +87,8 @@ import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
 import 

[2/6] hbase git commit: HBASE-20442 clean up incorrect use of commons-collections 3

2018-04-20 Thread busbey
HBASE-20442 clean up incorrect use of commons-collections 3

Signed-off-by: Umesh Agashe 
Signed-off-by: Yu Li 

 Conflicts:

hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/24eb141b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/24eb141b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/24eb141b

Branch: refs/heads/branch-2
Commit: 24eb141bacdb48bc7b958a217698a0565f9c2c4e
Parents: 9740168
Author: Sean Busbey 
Authored: Tue Apr 17 16:15:11 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 08:17:22 2018 -0500

--
 .../main/java/org/apache/hadoop/hbase/client/RowMutations.java   | 3 ++-
 .../src/main/java/org/apache/hadoop/hbase/util/Bytes.java| 2 +-
 .../hadoop/hbase/replication/ZKReplicationQueueStorage.java  | 2 +-
 .../main/java/org/apache/hadoop/hbase/regionserver/HRegion.java  | 2 +-
 .../java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java | 2 +-
 .../java/org/apache/hadoop/hbase/regionserver/StoreScanner.java  | 2 +-
 .../org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java | 2 +-
 .../src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java   | 4 ++--
 8 files changed, 10 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/24eb141b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
index 4b426cf..345e26a 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
@@ -23,10 +23,11 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import 
org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;
+
 /**
  * Performs multiple mutations atomically on a single row.
  * Currently {@link Put} and {@link Delete} are supported.

http://git-wip-us.apache.org/repos/asf/hbase/blob/24eb141b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index a315fd2..6eb09c1 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -38,7 +38,6 @@ import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
@@ -50,6 +49,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+import 
org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;
 
 import com.google.protobuf.ByteString;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/24eb141b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.java
--
diff --git 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.java
 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.java
index 6d72128..b9ebfb9 100644
--- 
a/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.java
+++ 
b/hbase-replication/src/main/java/org/apache/hadoop/hbase/replication/ZKReplicationQueueStorage.java
@@ -29,7 +29,6 @@ import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
 import java.util.stream.Collectors;
-import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HConstants;
@@ -53,6 +52,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+import 
org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;
 
 /**
  * ZK based replication queue 

[6/6] hbase git commit: HBASE-20440 Clean up incorrect use of commons-lang 2.y

2018-04-20 Thread busbey
HBASE-20440 Clean up incorrect use of commons-lang 2.y

Signed-off-by: Umesh Agashe 
Signed-off-by: Yu Li 

 Conflicts:

hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.java

hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java
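The commons-lang cleanup is the same kind of one-line swap, from org.apache.commons.lang to
org.apache.commons.lang3; a minimal sketch using the two helpers that appear in the hunks below
(the class name is hypothetical):

// Before (commons-lang 2.y, removed by HBASE-20440):
//   import org.apache.commons.lang.StringUtils;
//   import org.apache.commons.lang.RandomStringUtils;

// After: commons-lang3, same method signatures
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils;

public class LangMigrationSketch { // hypothetical example class
  public static void main(String[] args) {
    String name = RandomStringUtils.randomAlphabetic(8); // random alphabetic string
    if (StringUtils.isEmpty(name)) {                     // null-safe emptiness check
      name = "fallback";
    }
    System.out.println(name);
  }
}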


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/80cd8edf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/80cd8edf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/80cd8edf

Branch: refs/heads/branch-2.0
Commit: 80cd8edf862720efc3c3325ddbf892b9b4cb2387
Parents: b2825a6
Author: Sean Busbey 
Authored: Tue Apr 17 15:21:49 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 08:45:02 2018 -0500

--
 .../src/main/java/org/apache/hadoop/hbase/net/Address.java   | 2 +-
 .../hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java   | 2 +-
 .../hadoop/hbase/util/compaction/TestMajorCompactionRequest.java | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/80cd8edf/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
index 9d7f65c..ab7fa3b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.net;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hbase.thirdparty.com.google.common.net.HostAndPort;

http://git-wip-us.apache.org/repos/asf/hbase/blob/80cd8edf/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
index feea086..6c20b5b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import java.io.IOException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;

http://git-wip-us.apache.org/repos/asf/hbase/blob/80cd8edf/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java
index b626481..adecd5c 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java
@@ -22,7 +22,7 @@ import java.util.List;
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -167,4 +167,4 @@ public class TestMajorCompactionRequest {
 
doReturn(mock(Connection.class)).when(spy).getConnection(eq(configuration));
 return spy;
   }
-}
\ No newline at end of file
+}



[3/6] hbase git commit: HBASE-20440 Clean up incorrect use of commons-lang 2.y

2018-04-20 Thread busbey
HBASE-20440 Clean up incorrect use of commons-lang 2.y

Signed-off-by: Umesh Agashe 
Signed-off-by: Yu Li 

 Conflicts:

hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.java

hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9740168f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9740168f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9740168f

Branch: refs/heads/branch-2
Commit: 9740168fa8fce1f5bfe7e6ad4ee46f1757761256
Parents: 944ecc7
Author: Sean Busbey 
Authored: Tue Apr 17 15:21:49 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 08:17:22 2018 -0500

--
 .../src/main/java/org/apache/hadoop/hbase/net/Address.java   | 2 +-
 .../hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java   | 2 +-
 .../hadoop/hbase/util/compaction/TestMajorCompactionRequest.java | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9740168f/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
--
diff --git 
a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
index 9d7f65c..ab7fa3b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.net;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hbase.thirdparty.com.google.common.net.HostAndPort;

http://git-wip-us.apache.org/repos/asf/hbase/blob/9740168f/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
index feea086..6c20b5b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
@@ -18,7 +18,7 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import java.io.IOException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;

http://git-wip-us.apache.org/repos/asf/hbase/blob/9740168f/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java
index b626481..adecd5c 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/compaction/TestMajorCompactionRequest.java
@@ -22,7 +22,7 @@ import java.util.List;
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -167,4 +167,4 @@ public class TestMajorCompactionRequest {
 
doReturn(mock(Connection.class)).when(spy).getConnection(eq(configuration));
 return spy;
   }
-}
\ No newline at end of file
+}



[1/3] hbase git commit: HBASE-20439 Clean up incorrect use of commons-logging in hbase-server

2018-04-20 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/master 70377babd -> 914de1141


HBASE-20439 Clean up incorrect use of commons-logging in hbase-server

Signed-off-by: Umesh Agashe 
Signed-off-by: Yu Li 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b1fc00e1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b1fc00e1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b1fc00e1

Branch: refs/heads/master
Commit: b1fc00e16f4bb559d039d9dc162cae8042414661
Parents: 70377ba
Author: Sean Busbey 
Authored: Tue Apr 17 14:40:25 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 07:27:18 2018 -0500

--
 .../apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.java | 7 ---
 .../hadoop/hbase/quotas/FileArchiverNotifierImpl.java   | 7 ---
 .../hadoop/hbase/quotas/RegionSizeReportingChore.java   | 7 ---
 .../org/apache/hadoop/hbase/quotas/RegionSizeStoreImpl.java | 7 ---
 .../hbase/regionserver/throttle/StoreHotnessProtector.java  | 9 ++---
 .../org/apache/hadoop/hbase/TestClusterPortAssignment.java  | 7 ---
 .../org/apache/hadoop/hbase/client/TestFlushFromClient.java | 7 ---
 .../hadoop/hbase/client/TestSeparateClientZKCluster.java| 7 ---
 .../hadoop/hbase/procedure/TestFailedProcCleanup.java   | 7 ---
 .../java/org/apache/hadoop/hbase/wal/TestDisabledWAL.java   | 7 ---
 10 files changed, 42 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b1fc00e1/hbase-server/src/main/java/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.java
index 8f735bd..550aea7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/zksyncer/ClientZKSyncer.java
@@ -25,8 +25,6 @@ import java.util.Map;
 import java.util.concurrent.ArrayBlockingQueue;
 import java.util.concurrent.BlockingQueue;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.util.Threads;
@@ -37,6 +35,9 @@ import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
  * Tracks the target znode(s) on server ZK cluster and synchronize them to client ZK cluster if
  * changed
@@ -45,7 +46,7 @@ import org.apache.zookeeper.KeeperException;
  */
 @InterfaceAudience.Private
 public abstract class ClientZKSyncer extends ZKListener {
-  private static final Log LOG = LogFactory.getLog(ClientZKSyncer.class);
+  private static final Logger LOG = LoggerFactory.getLogger(ClientZKSyncer.class);
   private final Server server;
   private final ZKWatcher clientZkWatcher;
   // We use queues and daemon threads to synchronize the data to client ZK cluster

http://git-wip-us.apache.org/repos/asf/hbase/blob/b1fc00e1/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java
index 8cde9c1..58434f7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java
@@ -35,8 +35,6 @@ import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
 import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -54,6 +52,9 @@ import org.apache.hadoop.hbase.util.HFileArchiveUtil;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.hbase.thirdparty.com.google.common.collect.HashMultimap;
 import org.apache.hbase.thirdparty.com.google.common.collect.Multimap;
 import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
@@ -69,7 +70,7 @@ import 

[2/3] hbase git commit: HBASE-20440 Clean up incorrect use of commons-lang 2.y

2018-04-20 Thread busbey
HBASE-20440 Clean up incorrect use of commons-lang 2.y

Signed-off-by: Umesh Agashe 
Signed-off-by: Yu Li 
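
As an illustration only (the class name below is hypothetical, not from this commit): the migration is a package rename from org.apache.commons.lang to org.apache.commons.lang3, and the StringUtils, RandomStringUtils and HashCodeBuilder call sites stay as they were. A minimal sketch, assuming commons-lang3 is on the classpath:

  // Before: import org.apache.commons.lang.StringUtils; (commons-lang 2.y)
  import org.apache.commons.lang3.StringUtils;
  import org.apache.commons.lang3.RandomStringUtils;
  import org.apache.commons.lang3.builder.HashCodeBuilder;

  public class LangMigrationExample {
    public static void main(String[] args) {
      // Only the package prefix changes (lang -> lang3); the calls are unchanged.
      System.out.println(StringUtils.isBlank("   "));              // true
      System.out.println(RandomStringUtils.randomAlphanumeric(8)); // e.g. "a3F9kQ2z"
      System.out.println(new HashCodeBuilder().append("rowkey").toHashCode());
    }
  }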


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/09749f15
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/09749f15
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/09749f15

Branch: refs/heads/master
Commit: 09749f157486b3633d5fb0876b0b3afa01919968
Parents: b1fc00e
Author: Sean Busbey 
Authored: Tue Apr 17 15:21:49 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 07:29:23 2018 -0500

--
 .../src/main/java/org/apache/hadoop/hbase/net/Address.java   | 2 +-
 .../hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.java | 4 ++--
 .../org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java | 2 +-
 .../hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java   | 2 +-
 .../hadoop/hbase/util/compaction/TestMajorCompactionRequest.java | 4 ++--
 5 files changed, 7 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/09749f15/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
index 9d7f65c..ab7fa3b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.net;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hbase.thirdparty.com.google.common.net.HostAndPort;

http://git-wip-us.apache.org/repos/asf/hbase/blob/09749f15/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.java
index 3d21518..5b6d8c1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierFactoryImpl.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.quotas;
 import java.util.Objects;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hbase.TableName;
@@ -111,4 +111,4 @@ public final class FileArchiverNotifierFactoryImpl implements FileArchiverNotifi
   return "CacheKey[TableName=" + tn + "]";
 }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/09749f15/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java
index 58434f7..aa91696 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/FileArchiverNotifierImpl.java
@@ -34,7 +34,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;

http://git-wip-us.apache.org/repos/asf/hbase/blob/09749f15/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
index feea086..6c20b5b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHdfsSnapshotHRegion.java
@@ -18,7 +18,7 @@
 

[3/3] hbase git commit: HBASE-20442 clean up incorrect use of commons-collections 3

2018-04-20 Thread busbey
HBASE-20442 clean up incorrect use of commons-collections 3

Signed-off-by: Umesh Agashe 
Signed-off-by: Yu Li 
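
For illustration only: instead of commons-collections 3, callers now use the commons-collections4 classes relocated under org.apache.hbase.thirdparty, as the hunks below show. A minimal sketch under the assumption that the hbase-thirdparty shaded jar providing these relocated classes is on the classpath (the example class name is hypothetical):

  import java.util.Collections;
  import java.util.List;
  import java.util.Map;

  // Before: import org.apache.commons.collections.CollectionUtils; (collections 3)
  import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;
  import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;

  public class CollectionsMigrationExample {
    public static void main(String[] args) {
      List<String> names = Collections.emptyList();
      Map<String, String> props = Collections.singletonMap("k", "v");
      // Same static helpers as collections 3, just relocated under the thirdparty prefix.
      System.out.println(CollectionUtils.isEmpty(names)); // true
      System.out.println(MapUtils.isEmpty(props));        // false
    }
  }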


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/914de114
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/914de114
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/914de114

Branch: refs/heads/master
Commit: 914de1141699142bce1486468a742233d9440b23
Parents: 09749f1
Author: Sean Busbey 
Authored: Tue Apr 17 16:15:11 2018 -0500
Committer: Sean Busbey 
Committed: Fri Apr 20 07:30:34 2018 -0500

--
 .../org/apache/hadoop/hbase/backup/master/BackupLogCleaner.java  | 3 ++-
 .../main/java/org/apache/hadoop/hbase/client/RowMutations.java   | 3 ++-
 .../src/main/java/org/apache/hadoop/hbase/util/Bytes.java| 2 +-
 .../hadoop/hbase/replication/ZKReplicationQueueStorage.java  | 2 +-
 .../main/java/org/apache/hadoop/hbase/regionserver/HRegion.java  | 2 +-
 .../java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java | 2 +-
 .../java/org/apache/hadoop/hbase/regionserver/StoreScanner.java  | 2 +-
 .../org/apache/hadoop/hbase/regionserver/wal/FSWALEntry.java | 2 +-
 .../src/main/java/org/apache/hadoop/hbase/wal/WALSplitter.java   | 4 ++--
 9 files changed, 12 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/914de114/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.java
--
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.java
index 093ef76..5ce11d1 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/master/BackupLogCleaner.java
@@ -24,7 +24,6 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.collections.MapUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -42,6 +41,8 @@ import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.org.apache.commons.collections4.MapUtils;
+
 /**
  * Implementation of a log cleaner that checks if a log is still scheduled for incremental backup
  * before deleting it when its TTL is over.

http://git-wip-us.apache.org/repos/asf/hbase/blob/914de114/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
--
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
index 4b426cf..345e26a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
@@ -23,10 +23,11 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
-import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 
+import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;
+
 /**
  * Performs multiple mutations atomically on a single row.
  * Currently {@link Put} and {@link Delete} are supported.

http://git-wip-us.apache.org/repos/asf/hbase/blob/914de114/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index a315fd2..6eb09c1 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -38,7 +38,6 @@ import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
@@ -50,6 +49,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;
 
 import com.google.protobuf.ByteString;