[2/4] hbase git commit: HBASE-19173 Configure IntegrationTestRSGroup automatically for minicluster mode

2017-11-03 Thread apurtell
HBASE-19173 Configure IntegrationTestRSGroup automatically for minicluster mode


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4d7c40af
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4d7c40af
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4d7c40af

Branch: refs/heads/branch-1.4
Commit: 4d7c40af9b7fb8945b0ce582e8d5b322e7d91fc5
Parents: 28c7315
Author: Andrew Purtell 
Authored: Fri Nov 3 17:38:32 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Nov 3 23:27:56 2017 -0700

--
 .../apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java| 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4d7c40af/hbase-it/src/test/rsgroup/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
--
diff --git a/hbase-it/src/test/rsgroup/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java b/hbase-it/src/test/rsgroup/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
index e5bb995..622811c 100644
--- a/hbase-it/src/test/rsgroup/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
+++ b/hbase-it/src/test/rsgroup/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
@@ -21,8 +21,10 @@ package org.apache.hadoop.hbase.rsgroup;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.Waiter;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.junit.After;
 import org.junit.Before;
@@ -45,6 +47,10 @@ public class IntegrationTestRSGroup extends TestRSGroupsBase {
   LOG.info("Setting up IntegrationTestGroup");
   LOG.info("Initializing cluster with " + NUM_SLAVES_BASE + " servers");
   TEST_UTIL = new IntegrationTestingUtility();
+  TEST_UTIL.getConfiguration().set(HConstants.HBASE_MASTER_LOADBALANCER_CLASS,
+    RSGroupBasedLoadBalancer.class.getName());
+  TEST_UTIL.getConfiguration().set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
+    RSGroupAdminEndpoint.class.getName());
   ((IntegrationTestingUtility)TEST_UTIL).initializeCluster(NUM_SLAVES_BASE);
   //set shared configs
   admin = TEST_UTIL.getHBaseAdmin();
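
This change is identical on all four branches: on a minicluster the test must inject the RSGroup balancer and master coprocessor itself, while a distributed cluster is expected to arrive preconfigured. A minimal standalone sketch of the same wiring against a plain Configuration, using the key strings those two constants resolve to (the wrapper class is illustrative only, not part of the patch):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class RSGroupMiniClusterConfig {
  public static Configuration create() {
    Configuration conf = HBaseConfiguration.create();
    // HConstants.HBASE_MASTER_LOADBALANCER_CLASS resolves to this key.
    conf.set("hbase.master.loadbalancer.class",
        "org.apache.hadoop.hbase.rsgroup.RSGroupBasedLoadBalancer");
    // CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY resolves to this key.
    conf.set("hbase.coprocessor.master.classes",
        "org.apache.hadoop.hbase.rsgroup.RSGroupAdminEndpoint");
    return conf;
  }
}

Setting the same two keys in hbase-site.xml is how the equivalent setup is done on a real cluster, which is why the distributed path needs no change here.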



[1/4] hbase git commit: HBASE-19173 Configure IntegrationTestRSGroup automatically for minicluster mode

2017-11-03 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 1e227acd6 -> 51b65707b
  refs/heads/branch-1.4 28c7315e0 -> 4d7c40af9
  refs/heads/branch-2 3a8e3704c -> 65f620a85
  refs/heads/master 888e584a3 -> ac6b998af


HBASE-19173 Configure IntegrationTestRSGroup automatically for minicluster mode


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/51b65707
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/51b65707
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/51b65707

Branch: refs/heads/branch-1
Commit: 51b65707b35a444b68a2375e69162b50969deb1c
Parents: 1e227ac
Author: Andrew Purtell 
Authored: Fri Nov 3 17:38:32 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Nov 3 23:27:52 2017 -0700

--
 .../apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java| 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/51b65707/hbase-it/src/test/rsgroup/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
--
diff --git a/hbase-it/src/test/rsgroup/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java b/hbase-it/src/test/rsgroup/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
index e5bb995..622811c 100644
--- a/hbase-it/src/test/rsgroup/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
+++ b/hbase-it/src/test/rsgroup/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
@@ -21,8 +21,10 @@ package org.apache.hadoop.hbase.rsgroup;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.Waiter;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.junit.After;
 import org.junit.Before;
@@ -45,6 +47,10 @@ public class IntegrationTestRSGroup extends TestRSGroupsBase {
   LOG.info("Setting up IntegrationTestGroup");
   LOG.info("Initializing cluster with " + NUM_SLAVES_BASE + " servers");
   TEST_UTIL = new IntegrationTestingUtility();
+  TEST_UTIL.getConfiguration().set(HConstants.HBASE_MASTER_LOADBALANCER_CLASS,
+    RSGroupBasedLoadBalancer.class.getName());
+  TEST_UTIL.getConfiguration().set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
+    RSGroupAdminEndpoint.class.getName());
   ((IntegrationTestingUtility)TEST_UTIL).initializeCluster(NUM_SLAVES_BASE);
   //set shared configs
   admin = TEST_UTIL.getHBaseAdmin();



[3/4] hbase git commit: HBASE-19173 Configure IntegrationTestRSGroup automatically for minicluster mode

2017-11-03 Thread apurtell
HBASE-19173 Configure IntegrationTestRSGroup automatically for minicluster mode


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ac6b998a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ac6b998a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ac6b998a

Branch: refs/heads/master
Commit: ac6b998afe033cbb6a307d249c8e18bb97d54c9f
Parents: 888e584
Author: Andrew Purtell 
Authored: Fri Nov 3 17:38:32 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Nov 3 23:30:54 2017 -0700

--
 .../apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java| 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ac6b998a/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
--
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
index 5c099c8..b10e54a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
@@ -21,8 +21,10 @@ package org.apache.hadoop.hbase.rsgroup;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.Waiter;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.junit.After;
 import org.junit.Before;
@@ -43,6 +45,10 @@ public class IntegrationTestRSGroup extends TestRSGroupsBase {
   LOG.info("Setting up IntegrationTestRSGroup");
   LOG.info("Initializing cluster with " + NUM_SLAVES_BASE + " servers");
   TEST_UTIL = new IntegrationTestingUtility();
+  TEST_UTIL.getConfiguration().set(HConstants.HBASE_MASTER_LOADBALANCER_CLASS,
+    RSGroupBasedLoadBalancer.class.getName());
+  TEST_UTIL.getConfiguration().set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
+    RSGroupAdminEndpoint.class.getName());
   ((IntegrationTestingUtility)TEST_UTIL).initializeCluster(NUM_SLAVES_BASE);
   //set shared configs
   admin = TEST_UTIL.getAdmin();



[4/4] hbase git commit: HBASE-19173 Configure IntegrationTestRSGroup automatically for minicluster mode

2017-11-03 Thread apurtell
HBASE-19173 Configure IntegrationTestRSGroup automatically for minicluster mode


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/65f620a8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/65f620a8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/65f620a8

Branch: refs/heads/branch-2
Commit: 65f620a85698f9c3fdd9b8c8f2fd69f9bb7cf825
Parents: 3a8e370
Author: Andrew Purtell 
Authored: Fri Nov 3 17:38:32 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Nov 3 23:30:54 2017 -0700

--
 .../apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java| 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/65f620a8/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
--
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
index 5c099c8..b10e54a 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/rsgroup/IntegrationTestRSGroup.java
@@ -21,8 +21,10 @@ package org.apache.hadoop.hbase.rsgroup;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.Waiter;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.junit.After;
 import org.junit.Before;
@@ -43,6 +45,10 @@ public class IntegrationTestRSGroup extends TestRSGroupsBase {
   LOG.info("Setting up IntegrationTestRSGroup");
   LOG.info("Initializing cluster with " + NUM_SLAVES_BASE + " servers");
   TEST_UTIL = new IntegrationTestingUtility();
+  TEST_UTIL.getConfiguration().set(HConstants.HBASE_MASTER_LOADBALANCER_CLASS,
+    RSGroupBasedLoadBalancer.class.getName());
+  TEST_UTIL.getConfiguration().set(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
+    RSGroupAdminEndpoint.class.getName());
   ((IntegrationTestingUtility)TEST_UTIL).initializeCluster(NUM_SLAVES_BASE);
   //set shared configs
   admin = TEST_UTIL.getAdmin();



[1/2] hbase git commit: HBASE-19176 Remove hbase-native-client from branch-2

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 2485da1a6 -> 3a8e3704c


http://git-wip-us.apache.org/repos/asf/hbase/blob/3a8e3704/hbase-native-client/src/core/hbase_macros.h
--
diff --git a/hbase-native-client/src/core/hbase_macros.h b/hbase-native-client/src/core/hbase_macros.h
deleted file mode 100644
index 71765c8..0000000
--- a/hbase-native-client/src/core/hbase_macros.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#ifndef CORE_HBASE_MACROS_H_
-#define CORE_HBASE_MACROS_H_
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/**
- * The following code block define API as the tag for exported
- * functions. The library should be compiled with symbols visibility
- * set to hidden by default and only the exported functions should be
- * tagged as HBASE_API.
- *
- * When building the library on Windows, compile with compiler flag
- * "-D_LIBHBASE_IMPLEMENTATION_", whereas when linking application with
- * this library, this compiler flag should not be used.
- */
-#if defined _WIN32 || defined __CYGWIN__
-  #ifdef _LIBHBASE_IMPLEMENTATION_
-  #define API __declspec(dllexport)
-  #else
-#ifdef _LIBHBASE_TEST_
-  #define HBASE_API
-#else
-  #define HBASE_API __declspec(dllimport)
-#endif
-  #endif
-#else
-  #if __GNUC__ >= 4
-#define HBASE_API __attribute__ ((visibility ("default")))
-  #else
-#define HBASE_API
-  #endif
-#endif
-
-#ifdef __cplusplus
-}  // extern "C"
-#endif  // __cplusplus
-
-#endif  // CORE_HBASE_MACROS_H_
-

http://git-wip-us.apache.org/repos/asf/hbase/blob/3a8e3704/hbase-native-client/src/core/hbase_types.h
--
diff --git a/hbase-native-client/src/core/hbase_types.h b/hbase-native-client/src/core/hbase_types.h
deleted file mode 100644
index 8889b92..0000000
--- a/hbase-native-client/src/core/hbase_types.h
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-#ifndef CORE_HBASE_TYPES_H_
-#define CORE_HBASE_TYPES_H_
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-#include <stdint.h>
-#include <stdlib.h>
-
-typedef unsigned char hb_byte_t;
-
-/*
- * Base kv type.
- */
-typedef struct {
-  hb_byte_t* row;
-  size_t row_length;
-
-  char * family;
-  size_t family_length;
-
-  hb_byte_t* qual;
-  size_t qual_length;
-
-  hb_byte_t* value;
-  size_t value_length;
-
-  uint64_t timestamp;
-} hb_cell_t;
-
-typedef enum {
-  DELETE_ONE_VERSION,
-  DELETE_MULTIPLE_VERSIONS,
-  DELETE_FAMILY,
-  DELETE_FAMILY_VERSION
-} hb_delete_type;
-
-typedef enum {
-  USE_DEFAULT,
-  SKIP_WAL,
-  ASYNC_WAL,
-  SYNC_WAL,
-  HSYNC_WAL
-} hb_durability_type;
-
-typedef void* hb_admin_t;
-typedef void* hb_client_t;
-typedef void* hb_connection_attr_t;
-typedef void* hb_connection_t;
-typedef void* hb_get_t;
-typedef void* hb_mutation_t;
-typedef void* hb_put_t;
-typedef void* hb_delete_t;
-typedef void* hb_increment_t;
-typedef void* hb_append_t;
-typedef void* hb_result_t;
-typedef void* hb_scanner_t;
-
-#ifdef __cplusplus
-}  // extern "C"
-#endif  // __cplusplus
-
-#endif  // CORE_HBASE_TYPES_H_

http://git-wip-us.apache.org/repos/asf/hbase/blob/3a8e3704/hbase-native-client/src/core/mutation.cc
--
diff --git a/hbase-native-client/src/core/mutation.cc b/hbase-native-client/src/core/mutation.cc
deleted file mode 100644

[2/2] hbase git commit: HBASE-19176 Remove hbase-native-client from branch-2

2017-11-03 Thread stack
HBASE-19176 Remove hbase-native-client from branch-2

Signed-off-by: Josh Elser 
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3a8e3704
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3a8e3704
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3a8e3704

Branch: refs/heads/branch-2
Commit: 3a8e3704cd338bed91856798ceb1edfd70a7f9a4
Parents: 2485da1
Author: Michael Stack 
Authored: Fri Nov 3 17:51:45 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 23:18:24 2017 -0700

--
 dev-support/hbase_nightly_source-artifact.sh|   3 +-
 hbase-native-client/.gitignore  |  38 -
 hbase-native-client/CMakeLists.txt  | 157 ---
 hbase-native-client/README.md   |  53 ---
 hbase-native-client/bin/build-all.sh|  41 -
 hbase-native-client/bin/build-thirdparty.sh |  64 
 hbase-native-client/bin/download-thirdparty.sh  |  70 -
 hbase-native-client/bin/hbase-client-env.sh |  47 --
 .../cmake_modules/FindGTest.cmake   |  53 ---
 .../cmake_modules/FindLibEv.cmake   |  47 --
 hbase-native-client/src/async/CMakeLists.txt|  32 
 hbase-native-client/src/async/get-test.cc   |  59 ---
 hbase-native-client/src/async/hbase_admin.cc|  57 ---
 hbase-native-client/src/async/hbase_admin.h |  69 
 hbase-native-client/src/async/hbase_client.cc   |  47 --
 hbase-native-client/src/async/hbase_client.h|  60 ---
 .../src/async/hbase_connection.cc   |  37 -
 .../src/async/hbase_connection.h|  52 --
 hbase-native-client/src/async/hbase_errno.h |  23 ---
 hbase-native-client/src/async/hbase_get.cc  |  61 ---
 hbase-native-client/src/async/hbase_get.h   |  73 -
 .../src/async/hbase_mutations.cc| 111 -
 hbase-native-client/src/async/hbase_mutations.h | 119 --
 hbase-native-client/src/async/hbase_result.cc   |  37 -
 hbase-native-client/src/async/hbase_result.h|  44 --
 hbase-native-client/src/async/hbase_scanner.cc  |  59 ---
 hbase-native-client/src/async/hbase_scanner.h   |  80 --
 hbase-native-client/src/async/mutations-test.cc | 102 
 hbase-native-client/src/core/CMakeLists.txt |  31 
 hbase-native-client/src/core/admin.cc   |  20 ---
 hbase-native-client/src/core/admin.h|  25 ---
 hbase-native-client/src/core/client.cc  |  20 ---
 hbase-native-client/src/core/client.h   |  25 ---
 hbase-native-client/src/core/connection.cc  |  22 ---
 hbase-native-client/src/core/connection.h   |  26 ---
 hbase-native-client/src/core/connection_attr.h  |  30 
 hbase-native-client/src/core/delete.cc  |  22 ---
 hbase-native-client/src/core/delete.h   |  29 
 hbase-native-client/src/core/get.cc |  20 ---
 hbase-native-client/src/core/get.h  |  26 ---
 .../src/core/hbase_connection_attr.cc   |  41 -
 .../src/core/hbase_connection_attr.h|  51 --
 hbase-native-client/src/core/hbase_macros.h |  60 ---
 hbase-native-client/src/core/hbase_types.h  |  83 --
 hbase-native-client/src/core/mutation.cc|  42 -
 hbase-native-client/src/core/mutation.h |  48 --
 hbase-native-client/src/core/put.cc |  22 ---
 hbase-native-client/src/core/put.h  |  29 
 hbase-native-client/src/core/scanner.cc |  20 ---
 hbase-native-client/src/core/scanner.h  |  25 ---
 hbase-native-client/src/rpc/CMakeLists.txt  |  17 --
 hbase-native-client/src/sync/CMakeLists.txt |  24 ---
 hbase-native-client/src/sync/hbase_admin.cc |  51 --
 hbase-native-client/src/sync/hbase_admin.h  |  61 ---
 .../src/sync/hbase_connection.cc|  37 -
 hbase-native-client/src/sync/hbase_connection.h |  52 --
 56 files changed, 1 insertion(+), 2653 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3a8e3704/dev-support/hbase_nightly_source-artifact.sh
--
diff --git a/dev-support/hbase_nightly_source-artifact.sh b/dev-support/hbase_nightly_source-artifact.sh
index 4e4c6a2..f5ecb00 100755
--- a/dev-support/hbase_nightly_source-artifact.sh
+++ b/dev-support/hbase_nightly_source-artifact.sh
@@ -151,12 +151,11 @@ diff --binary --recursive . "${unpack_dir}" >"${working_dir}/diff_output" || tru
 
 cd "${working_dir}"
 # expectation check largely based on HBASE-14952
-echo "Checking against things we don't expect to include in the source tarball (git related, hbase-native-client, etc.)"
+echo "Checking against thin

hbase git commit: HBASE-19054 switch precommit image to one from maven

2017-11-03 Thread mdrob
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18477 4b2a0976e -> 9e78c0eb8


HBASE-19054 switch precommit image to one from maven

Amending Author: Mike Drob 
Signed-off-by: Sean Busbey 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9e78c0eb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9e78c0eb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9e78c0eb

Branch: refs/heads/HBASE-18477
Commit: 9e78c0eb825c3193e3b2a24940be18fd8a32aadd
Parents: 4b2a097
Author: Misty Stanley-Jones 
Authored: Fri Nov 3 21:46:52 2017 -0500
Committer: Mike Drob 
Committed: Fri Nov 3 21:47:51 2017 -0500

--
 dev-support/docker/Dockerfile | 149 -
 1 file changed, 15 insertions(+), 134 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9e78c0eb/dev-support/docker/Dockerfile
--
diff --git a/dev-support/docker/Dockerfile b/dev-support/docker/Dockerfile
index 62c6030..49ad14d 100644
--- a/dev-support/docker/Dockerfile
+++ b/dev-support/docker/Dockerfile
@@ -1,4 +1,3 @@
-
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -18,148 +17,30 @@
 # Dockerfile for installing the necessary dependencies for building Hadoop.
 # See BUILDING.txt.
 
+FROM maven:3.5-jdk-8
 
-FROM ubuntu:trusty
-
-WORKDIR /root
-
-ENV DEBIAN_FRONTEND noninteractive
-ENV DEBCONF_TERSE true
-
-##
-# Install common dependencies from packages
-#
-# WARNING: DO NOT PUT JAVA APPS HERE! Otherwise they will install default
-# Ubuntu Java.  See Java section below!
-##
-RUN apt-get -q update && apt-get -q install --no-install-recommends -y \
-build-essential \
-bzip2 \
-cmake \
-curl \
-doxygen \
-fuse \
-g++ \
-gcc \
-git \
-gnupg-agent \
-make \
-libbz2-dev \
-libcurl4-openssl-dev \
-libfuse-dev \
-libperl-critic-perl \
-libprotobuf-dev \
-libprotoc-dev \
-libsnappy-dev \
-libssl-dev \
-libtool \
-pinentry-curses \
-pkg-config \
-protobuf-compiler \
-protobuf-c-compiler \
-python \
-python2.7 \
-python-pip \
-rsync \
-snappy \
-zlib1g-dev
-
-###
-# Oracle Java
-###
-
-RUN echo "dot_style = mega" > "/root/.wgetrc"
-RUN echo "quiet = on" >> "/root/.wgetrc"
-
-RUN apt-get -q update && apt-get -q install --no-install-recommends -y software-properties-common
-RUN add-apt-repository -y ppa:webupd8team/java
-
-# Auto-accept the Oracle JDK license
-RUN echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | sudo /usr/bin/debconf-set-selections
-RUN apt-get -q update && apt-get -q install --no-install-recommends -y oracle-java8-installer
-
-
-# Apps that require Java
-###
 RUN apt-get -q update && apt-get -q install --no-install-recommends -y \
-ant \
-maven
-
-# Fixing the Apache commons / Maven dependency problem under Ubuntu:
-# See http://wiki.apache.org/commons/VfsProblems
-RUN cd /usr/share/maven/lib && ln -s ../../java/commons-lang3-3.6.jar .
-
-##
-# Install findbugs
-##
-RUN mkdir -p /opt/findbugs && \
-curl -L -s -S \
- https://sourceforge.net/projects/findbugs/files/findbugs/3.0.1/findbugs-noUpdateChecks-3.0.1.tar.gz/download \
- -o /opt/findbugs.tar.gz && \
-tar xzf /opt/findbugs.tar.gz --strip-components 1 -C /opt/findbugs
-ENV FINDBUGS_HOME /opt/findbugs
+   bats \
+   findbugs \
+   libperl-critic-perl \
+   pylint \
+   python-dateutil \
+   rsync \
+   ruby \
+   shellcheck \
+   wget \
+   && \
+gem install rake rubocop ruby-lint
 
-
-# Install shellcheck
-
-RUN apt-get -q install -y cabal-install
-RUN mkdir /root/.cabal
-RUN echo "remote-repo: hackage.fpcomplete.com:http://hackage.fpcomplete.com/"; 
>> /root/.cabal/config
-#RUN echo "remote-repo: hackage.haskell.org:http://hackage.haskell.org/"; > 
/root/.cabal/config
-RUN echo "remote-repo-cache: /root/.cabal/packages" >> /root/.cabal/config
-RUN cabal update
-RUN cabal install shellcheck --global
-
-
-# Install bats
-
-RUN add-apt-repository -y ppa:duggan/bats
-RUN apt-get -q update
-RUN apt-get -q install --no-install-recommends -y bats
-
-
-# Install pylint
-
-RUN pip install pylint
-
-
-# Install dateutil.parser
-
-RUN pip install python-dateutil
-
-
-# Install Ruby 2, based on Yetus 0.4.0 dockerfile
-###
-RUN echo 'gem: --no-rdoc --no-ri' >> /root/.gemrc
-RUN apt-get -q install -y ruby2.0
-#
-# on trusty, the above installs ruby2.0 and ruby (1.9.3) exes
-# but update-alternatives is broken, so we need to do some work
-# to make 2.0 actually the 

hbase git commit: HBASE-13622 document upgrade rollback.

2017-11-03 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-2 5df965158 -> 2485da1a6


HBASE-13622 document upgrade rollback.

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2485da1a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2485da1a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2485da1a

Branch: refs/heads/branch-2
Commit: 2485da1a60b8cb51dc5376f7d68de93084cc190f
Parents: 5df9651
Author: Sean Busbey 
Authored: Mon Jun 22 11:19:58 2015 -0500
Committer: Sean Busbey 
Committed: Fri Nov 3 19:49:42 2017 -0500

--
 src/main/asciidoc/_chapters/ops_mgt.adoc   |   1 +
 src/main/asciidoc/_chapters/upgrading.adoc | 133 +++-
 2 files changed, 132 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2485da1a/src/main/asciidoc/_chapters/ops_mgt.adoc
--
diff --git a/src/main/asciidoc/_chapters/ops_mgt.adoc b/src/main/asciidoc/_chapters/ops_mgt.adoc
index 771e3be..0941ce0 100644
--- a/src/main/asciidoc/_chapters/ops_mgt.adoc
+++ b/src/main/asciidoc/_chapters/ops_mgt.adoc
@@ -1351,6 +1351,7 @@ image::hbase_replication_diagram.jpg[]
 HBase replication borrows many concepts from the [firstterm]_statement-based replication_ design used by MySQL.
 Instead of SQL statements, entire WALEdits (consisting of multiple cell inserts coming from Put and Delete operations on the clients) are replicated in order to maintain atomicity.
 
+[[hbase.replication.management]]
 === Managing and Configuring Cluster Replication
 .Cluster Configuration Overview
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/2485da1a/src/main/asciidoc/_chapters/upgrading.adoc
--
diff --git a/src/main/asciidoc/_chapters/upgrading.adoc b/src/main/asciidoc/_chapters/upgrading.adoc
index 35f38fa..9fc61dc 100644
--- a/src/main/asciidoc/_chapters/upgrading.adoc
+++ b/src/main/asciidoc/_chapters/upgrading.adoc
@@ -67,7 +67,7 @@ In addition to the usual API versioning considerations HBase has other compatibi
 
 .File format compatibility
 * Support file formats backward and forward compatible
-* Example: File, ZK encoding, directory layout is upgraded automatically as part of an HBase upgrade. User can rollback to the older version and everything will continue to work.
+* Example: File, ZK encoding, directory layout is upgraded automatically as part of an HBase upgrade. User can downgrade to the older version and everything will continue to work.
 
 .Client API compatibility
 * Allow changing or removing existing client APIs.
@@ -111,7 +111,7 @@ for warning about incompatible changes). All effort will be made to provide a de
 | | Major | Minor | Patch
 |Client-Server wire Compatibility|  N |Y |Y
 |Server-Server Compatibility |N |Y |Y
-|File Format Compatibility | N footnote:[comp_matrix_offline_upgrade_note,Running an offline upgrade tool without rollback might be needed. We will typically only support migrating data from major version X to major version X+1.] | Y |Y
+|File Format Compatibility | N footnote:[comp_matrix_offline_upgrade_note,Running an offline upgrade tool without downgrade might be needed. We will typically only support migrating data from major version X to major version X+1.] | Y |Y
 |Client API Compatibility  | N | Y |Y
 |Client Binary Compatibility | N | N |Y
 4+|Server-Side Limited API Compatibility
@@ -193,6 +193,135 @@ Unless otherwise specified, HBase point versions are binary compatible. You can
 
 In the minor version-particular sections below, we call out where the versions are wire/protocol compatible and in this case, it is also possible to do a <>. For example, in <>, we state that it is possible to do a rolling upgrade between hbase-0.98.x and hbase-1.0.0.
 
+== Rollback
+
+Sometimes things don't go as planned when attempting an upgrade. This section explains how to perform a _rollback_ to an earlier HBase release. Note that this should only be needed between Major and some Minor releases. You should always be able to _downgrade_ between HBase Patch releases within the same Minor version. These instructions may require you to take steps before you start the upgrade process, so be sure to read through this section beforehand.
+
+=== Caveats
+
+.Rollback vs Downgrade
+This section describes how to perform a _rollback_ on an upgrade between HBase minor and major versions. In this document, rollback refers to the process of taking an upgraded cluster and restoring it to the old version _while losing all changes that have occurred since upgrade_. By contrast, a cluster _downgrade_ would restore an upgraded cluster to the old version while maintaining any data written 

hbase git commit: HBASE-13622 document upgrade rollback.

2017-11-03 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/master 8e0571a3a -> 888e584a3


HBASE-13622 document upgrade rollback.

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/888e584a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/888e584a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/888e584a

Branch: refs/heads/master
Commit: 888e584a3290b285f1cd94ed1452ad562138b511
Parents: 8e0571a
Author: Sean Busbey 
Authored: Mon Jun 22 11:19:58 2015 -0500
Committer: Sean Busbey 
Committed: Fri Nov 3 19:48:26 2017 -0500

--
 src/main/asciidoc/_chapters/ops_mgt.adoc   |   1 +
 src/main/asciidoc/_chapters/upgrading.adoc | 133 +++-
 2 files changed, 132 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/888e584a/src/main/asciidoc/_chapters/ops_mgt.adoc
--
diff --git a/src/main/asciidoc/_chapters/ops_mgt.adoc b/src/main/asciidoc/_chapters/ops_mgt.adoc
index d6d74e7..b6babd6 100644
--- a/src/main/asciidoc/_chapters/ops_mgt.adoc
+++ b/src/main/asciidoc/_chapters/ops_mgt.adoc
@@ -1395,6 +1395,7 @@ image::hbase_replication_diagram.jpg[]
 HBase replication borrows many concepts from the [firstterm]_statement-based replication_ design used by MySQL.
 Instead of SQL statements, entire WALEdits (consisting of multiple cell inserts coming from Put and Delete operations on the clients) are replicated in order to maintain atomicity.
 
+[[hbase.replication.management]]
 === Managing and Configuring Cluster Replication
 .Cluster Configuration Overview
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/888e584a/src/main/asciidoc/_chapters/upgrading.adoc
--
diff --git a/src/main/asciidoc/_chapters/upgrading.adoc b/src/main/asciidoc/_chapters/upgrading.adoc
index 9225abd..f84eae9 100644
--- a/src/main/asciidoc/_chapters/upgrading.adoc
+++ b/src/main/asciidoc/_chapters/upgrading.adoc
@@ -67,7 +67,7 @@ In addition to the usual API versioning considerations HBase has other compatibi
 
 .File format compatibility
 * Support file formats backward and forward compatible
-* Example: File, ZK encoding, directory layout is upgraded automatically as part of an HBase upgrade. User can rollback to the older version and everything will continue to work.
+* Example: File, ZK encoding, directory layout is upgraded automatically as part of an HBase upgrade. User can downgrade to the older version and everything will continue to work.
 
 .Client API compatibility
 * Allow changing or removing existing client APIs.
@@ -111,7 +111,7 @@ for warning about incompatible changes). All effort will be made to provide a de
 | | Major | Minor | Patch
 |Client-Server wire Compatibility|  N |Y |Y
 |Server-Server Compatibility |N |Y |Y
-|File Format Compatibility | N footnote:[comp_matrix_offline_upgrade_note,Running an offline upgrade tool without rollback might be needed. We will typically only support migrating data from major version X to major version X+1.] | Y |Y
+|File Format Compatibility | N footnote:[comp_matrix_offline_upgrade_note,Running an offline upgrade tool without downgrade might be needed. We will typically only support migrating data from major version X to major version X+1.] | Y |Y
 |Client API Compatibility  | N | Y |Y
 |Client Binary Compatibility | N | N |Y
 4+|Server-Side Limited API Compatibility
@@ -193,6 +193,135 @@ Unless otherwise specified, HBase point versions are binary compatible. You can
 
 In the minor version-particular sections below, we call out where the versions are wire/protocol compatible and in this case, it is also possible to do a <>. For example, in <>, we state that it is possible to do a rolling upgrade between hbase-0.98.x and hbase-1.0.0.
 
+== Rollback
+
+Sometimes things don't go as planned when attempting an upgrade. This section explains how to perform a _rollback_ to an earlier HBase release. Note that this should only be needed between Major and some Minor releases. You should always be able to _downgrade_ between HBase Patch releases within the same Minor version. These instructions may require you to take steps before you start the upgrade process, so be sure to read through this section beforehand.
+
+=== Caveats
+
+.Rollback vs Downgrade
+This section describes how to perform a _rollback_ on an upgrade between HBase minor and major versions. In this document, rollback refers to the process of taking an upgraded cluster and restoring it to the old version _while losing all changes that have occurred since upgrade_. By contrast, a cluster _downgrade_ would restore an upgraded cluster to the old version while maintaining any data written sinc

[2/2] hbase git commit: HBASE-19068 Change all url of apache.org from HTTP to HTTPS in HBase book

2017-11-03 Thread janh
HBASE-19068 Change all url of apache.org from HTTP to HTTPS in HBase book

Signed-off-by: Jan Hentschel 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8e0571a3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8e0571a3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8e0571a3

Branch: refs/heads/master
Commit: 8e0571a3a412d8fdeb8de4581aa251116602caf5
Parents: 125f3ea
Author: Yung-An He 
Authored: Mon Oct 30 15:56:16 2017 +0800
Committer: Jan Hentschel 
Committed: Fri Nov 3 23:54:18 2017 +0100

--
 .../appendix_contributing_to_documentation.adoc |  6 +-
 src/main/asciidoc/_chapters/architecture.adoc   | 80 ++--
 src/main/asciidoc/_chapters/asf.adoc|  4 +-
 src/main/asciidoc/_chapters/configuration.adoc  | 16 ++--
 src/main/asciidoc/_chapters/cp.adoc | 10 +--
 src/main/asciidoc/_chapters/datamodel.adoc  | 26 +++
 src/main/asciidoc/_chapters/developer.adoc  | 34 -
 src/main/asciidoc/_chapters/external_apis.adoc  |  8 +-
 src/main/asciidoc/_chapters/faq.adoc|  4 +-
 .../asciidoc/_chapters/getting_started.adoc |  4 +-
 src/main/asciidoc/_chapters/hbase-default.adoc  | 10 +--
 src/main/asciidoc/_chapters/hbase_apis.adoc |  2 +-
 src/main/asciidoc/_chapters/mapreduce.adoc  | 28 +++
 src/main/asciidoc/_chapters/ops_mgt.adoc| 22 +++---
 src/main/asciidoc/_chapters/performance.adoc| 26 +++
 src/main/asciidoc/_chapters/preface.adoc|  4 +-
 src/main/asciidoc/_chapters/rpc.adoc|  2 +-
 src/main/asciidoc/_chapters/schema_design.adoc  | 20 ++---
 src/main/asciidoc/_chapters/security.adoc   | 10 +--
 src/main/asciidoc/_chapters/spark.adoc  |  4 +-
 src/main/asciidoc/_chapters/sql.adoc|  4 +-
 .../_chapters/thrift_filter_language.adoc   |  2 +-
 src/main/asciidoc/_chapters/tracing.adoc|  4 +-
 .../asciidoc/_chapters/troubleshooting.adoc | 10 +--
 src/main/asciidoc/_chapters/unit_testing.adoc   |  2 +-
 src/main/asciidoc/_chapters/upgrading.adoc  |  8 +-
 src/main/asciidoc/_chapters/zookeeper.adoc  |  6 +-
 src/main/asciidoc/book.adoc |  2 +-
 src/site/asciidoc/acid-semantics.adoc   |  2 +-
 src/site/asciidoc/cygwin.adoc   |  6 +-
 src/site/asciidoc/index.adoc|  2 +-
 src/site/asciidoc/metrics.adoc  |  6 +-
 src/site/asciidoc/old_news.adoc | 14 ++--
 src/site/asciidoc/sponsors.adoc |  4 +-
 src/site/xdoc/metrics.xml   |  4 +-
 35 files changed, 198 insertions(+), 198 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8e0571a3/src/main/asciidoc/_chapters/appendix_contributing_to_documentation.adoc
--
diff --git a/src/main/asciidoc/_chapters/appendix_contributing_to_documentation.adoc b/src/main/asciidoc/_chapters/appendix_contributing_to_documentation.adoc
index 26a843d..a603c16 100644
--- a/src/main/asciidoc/_chapters/appendix_contributing_to_documentation.adoc
+++ b/src/main/asciidoc/_chapters/appendix_contributing_to_documentation.adoc
@@ -35,9 +35,9 @@ including the documentation.
 
 In HBase, documentation includes the following areas, and probably some others:
 
-* The link:http://hbase.apache.org/book.html[HBase Reference
+* The link:https://hbase.apache.org/book.html[HBase Reference
   Guide] (this book)
-* The link:http://hbase.apache.org/[HBase website]
+* The link:https://hbase.apache.org/[HBase website]
 * API documentation
 * Command-line utility output and help text
 * Web UI strings, explicit help text, context-sensitive strings, and others
@@ -126,7 +126,7 @@ This directory also stores images used in the HBase Reference Guide.
 
 The website's pages are written in an HTML-like XML dialect called xdoc, which
 has a reference guide at
-http://maven.apache.org/archives/maven-1.x/plugins/xdoc/reference/xdocs.html.
+https://maven.apache.org/archives/maven-1.x/plugins/xdoc/reference/xdocs.html.
 You can edit these files in a plain-text editor, an IDE, or an XML editor such
 as XML Mind XML Editor (XXE) or Oxygen XML Author.
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/8e0571a3/src/main/asciidoc/_chapters/architecture.adoc
--
diff --git a/src/main/asciidoc/_chapters/architecture.adoc b/src/main/asciidoc/_chapters/architecture.adoc
index 0f02a79..edf3a3b 100644
--- a/src/main/asciidoc/_chapters/architecture.adoc
+++ b/src/main/asciidoc/_chapters/architecture.adoc
@@ -101,7 +101,7 @@ The `hbase:meta` table structure is as follows:
 
 .Values
 
-* `info:regioninfo` (serialized link:http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/H

[1/2] hbase git commit: HBASE-19068 Change all url of apache.org from HTTP to HTTPS in HBase book

2017-11-03 Thread janh
Repository: hbase
Updated Branches:
  refs/heads/master 125f3eace -> 8e0571a3a


http://git-wip-us.apache.org/repos/asf/hbase/blob/8e0571a3/src/main/asciidoc/_chapters/performance.adoc
--
diff --git a/src/main/asciidoc/_chapters/performance.adoc b/src/main/asciidoc/_chapters/performance.adoc
index f1d89b5..c917646 100644
--- a/src/main/asciidoc/_chapters/performance.adoc
+++ b/src/main/asciidoc/_chapters/performance.adoc
@@ -320,7 +320,7 @@ See also <> for compression caveats.
 [[schema.regionsize]]
 === Table RegionSize
 
-The regionsize can be set on a per-table basis via `setFileSize` on link:http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html[HTableDescriptor] in the event where certain tables require different regionsizes than the configured default regionsize.
+The regionsize can be set on a per-table basis via `setFileSize` on link:https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html[HTableDescriptor] in the event where certain tables require different regionsizes than the configured default regionsize.
 
 See <> for more information.
 
@@ -372,7 +372,7 @@ Bloom filters are enabled on a Column Family.
 You can do this by using the setBloomFilterType method of HColumnDescriptor or using the HBase API.
 Valid values are `NONE`, `ROW` (default), or `ROWCOL`.
 See <> for more information on `ROW` versus `ROWCOL`.
-See also the API documentation for link:http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html[HColumnDescriptor].
+See also the API documentation for link:https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html[HColumnDescriptor].
 
 The following example creates a table and enables a ROWCOL Bloom filter on the `colfam1` column family.
 
@@ -431,7 +431,7 @@ The blocksize can be configured for each ColumnFamily in a table, and defaults t
 Larger cell values require larger blocksizes.
 There is an inverse relationship between blocksize and the resulting StoreFile indexes (i.e., if the blocksize is doubled then the resulting indexes should be roughly halved).
 
-See link:http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html[HColumnDescriptor] and <>for more information.
+See link:https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html[HColumnDescriptor] and <>for more information.
 
 [[cf.in.memory]]
 === In-Memory ColumnFamilies
@@ -440,7 +440,7 @@ ColumnFamilies can optionally be defined as in-memory.
 Data is still persisted to disk, just like any other ColumnFamily.
 In-memory blocks have the highest priority in the <>, but it is not a guarantee that the entire table will be in memory.
-See link:http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html[HColumnDescriptor] for more information.
+See link:https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html[HColumnDescriptor] for more information.
 for more information.
 
 [[perf.compression]]
 === Compression
@@ -549,7 +549,7 @@ If deferred log flush is used, WAL edits are kept in memory until the flush peri
 The benefit is aggregated and asynchronous `WAL`- writes, but the potential downside is that if the RegionServer goes down the yet-to-be-flushed edits are lost.
 This is safer, however, than not using WAL at all with Puts.
 
-Deferred log flush can be configured on tables via link:http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html[HTableDescriptor].
+Deferred log flush can be configured on tables via link:https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/HTableDescriptor.html[HTableDescriptor].
 The default value of `hbase.regionserver.optionallogflushinterval` is 1000ms.
 
 [[perf.hbase.client.putwal]]
@@ -574,7 +574,7 @@ There is a utility `HTableUtil` currently on MASTER that does this, but you can
 [[perf.hbase.write.mr.reducer]]
 === MapReduce: Skip The Reducer
 
-When writing a lot of data to an HBase table from a MR job (e.g., with link:http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.html[TableOutputFormat]), and specifically where Puts are being emitted from the Mapper, skip the Reducer step.
+When writing a lot of data to an HBase table from a MR job (e.g., with link:https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/mapreduce/TableOutputFormat.html[TableOutputFormat]), and specifically where Puts are being emitted from the Mapper, skip the Reducer step.
 When a Reducer step is used, all of the output (Puts) from the Mapper will get spooled to disk, then sorted/shuffled to other Reducers that will most likely be off-node.
 It's far more efficient to just write directly to HBase.
 
@@ -600,7 +600,7 @@ For example, here is a good general thread on what to look at addressing read-ti
 [[perf.hbase.client.caching]]
 === Scan Caching
 
-If HBase is used as an input source for a MapRe

[2/4] hbase git commit: HBASE-19144 [RSgroups] Retry assignments in FAILED_OPEN state when servers (re)join the cluster

2017-11-03 Thread apurtell
HBASE-19144 [RSgroups] Retry assignments in FAILED_OPEN state when servers (re)join the cluster


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1e227acd
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1e227acd
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1e227acd

Branch: refs/heads/branch-1
Commit: 1e227acd654bde80e27799ae16e5a806467141c8
Parents: e61f6ff
Author: Andrew Purtell 
Authored: Fri Nov 3 15:03:08 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Nov 3 15:05:24 2017 -0700

--
 .../hbase/rsgroup/RSGroupInfoManager.java   |  4 +
 .../hbase/rsgroup/RSGroupInfoManagerImpl.java   | 90 ++--
 2 files changed, 89 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1e227acd/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
--
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
index ab423e9..2330605 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
@@ -36,6 +36,10 @@ import org.apache.hadoop.hbase.net.Address;
  */
 @InterfaceAudience.Private
 public interface RSGroupInfoManager {
+
+  String REASSIGN_WAIT_INTERVAL_KEY = "hbase.rsgroup.reassign.wait";
+  long DEFAULT_REASSIGN_WAIT_INTERVAL = 30 * 1000L;
+
   //Assigned before user tables
   TableName RSGROUP_TABLE_NAME =
   TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "rsgroup");

http://git-wip-us.apache.org/repos/asf/hbase/blob/1e227acd/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
--
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
index 80eaefb..cfaa632 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
@@ -66,6 +66,7 @@ import org.apache.hadoop.hbase.constraint.ConstraintException;
 import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.master.MasterServices;
+import org.apache.hadoop.hbase.master.RegionState;
 import org.apache.hadoop.hbase.master.ServerListener;
 import org.apache.hadoop.hbase.master.procedure.CreateTableProcedure;
 import org.apache.hadoop.hbase.master.procedure.ProcedurePrepareLatch;
@@ -81,6 +82,7 @@ import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
 import org.apache.hadoop.hbase.security.access.AccessControlLists;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ModifyRegionUtils;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.zookeeper.KeeperException;
@@ -119,6 +121,7 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListene
   private volatile Set<String> prevRSGroups;
   private RSGroupSerDe rsGroupSerDe;
   private DefaultServerUpdater defaultServerUpdater;
+  private FailedOpenUpdater failedOpenUpdater;
   private boolean isInit = false;
 
   public RSGroupInfoManagerImpl(MasterServices master) throws IOException {
@@ -136,8 +139,10 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListene
 refresh();
 rsGroupStartupWorker.start();
 defaultServerUpdater = new DefaultServerUpdater(this);
+Threads.setDaemonThreadRunning(defaultServerUpdater);
+failedOpenUpdater = new FailedOpenUpdater(this);
+Threads.setDaemonThreadRunning(failedOpenUpdater);
 master.getServerManager().registerListener(this);
-defaultServerUpdater.start();
 isInit = true;
   }
 
@@ -493,6 +498,7 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListene
   @Override
   public void serverAdded(ServerName serverName) {
 defaultServerUpdater.serverChanged();
+failedOpenUpdater.serverChanged();
   }
 
   @Override
@@ -503,18 +509,22 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListene
   private static class DefaultServerUpdater extends Thread {
    private static final Log LOG = LogFactory.getLog(DefaultServerUpdater.class);
 private RSGroupInfoManagerImpl mgr;
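
The REASSIGN_WAIT_INTERVAL_KEY added above is an ordinary Configuration knob. A hedged one-liner showing how such a key would be read (conf is any org.apache.hadoop.conf.Configuration instance; the variable name is illustrative):

long reassignWaitMillis = conf.getLong("hbase.rsgroup.reassign.wait", 30 * 1000L);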

[3/4] hbase git commit: HBASE-19144 [RSgroups] Retry assignments in FAILED_OPEN state when servers (re)join the cluster

2017-11-03 Thread apurtell
HBASE-19144 [RSgroups] Retry assignments in FAILED_OPEN state when servers (re)join the cluster


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5df96515
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5df96515
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5df96515

Branch: refs/heads/branch-2
Commit: 5df9651581f599ba9bcbb0def660870ab0398ccc
Parents: b9b0f15
Author: Andrew Purtell 
Authored: Fri Nov 3 15:03:27 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Nov 3 15:08:19 2017 -0700

--
 .../hbase/rsgroup/RSGroupInfoManager.java   |  4 +
 .../hbase/rsgroup/RSGroupInfoManagerImpl.java   | 89 +++-
 2 files changed, 92 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5df96515/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
--
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
index c8fee44..3fb40da 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
@@ -36,6 +36,10 @@ import org.apache.hadoop.hbase.net.Address;
  */
 @InterfaceAudience.Private
 public interface RSGroupInfoManager {
+
+  String REASSIGN_WAIT_INTERVAL_KEY = "hbase.rsgroup.reassign.wait";
+  long DEFAULT_REASSIGN_WAIT_INTERVAL = 30 * 1000L;
+
   //Assigned before user tables
   TableName RSGROUP_TABLE_NAME =
   TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "rsgroup");

http://git-wip-us.apache.org/repos/asf/hbase/blob/5df96515/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
--
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
index 9520f5f..7cf04c7 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
@@ -36,6 +36,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.Coprocessor;
@@ -65,6 +66,7 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.master.ServerListener;
 import org.apache.hadoop.hbase.master.TableStateManager;
+import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
 import org.apache.hadoop.hbase.net.Address;
 import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
@@ -144,6 +146,7 @@ class RSGroupInfoManagerImpl implements RSGroupInfoManager {
   private Set<String> prevRSGroups = new HashSet<>();
   private final ServerEventsListenerThread serverEventsListenerThread =
   new ServerEventsListenerThread();
+  private FailedOpenUpdaterThread failedOpenUpdaterThread;
 
   private RSGroupInfoManagerImpl(MasterServices masterServices) throws IOException {
 this.masterServices = masterServices;
@@ -156,6 +159,9 @@ class RSGroupInfoManagerImpl implements RSGroupInfoManager {
 rsGroupStartupWorker.start();
 serverEventsListenerThread.start();
 masterServices.getServerManager().registerListener(serverEventsListenerThread);
+failedOpenUpdaterThread = new FailedOpenUpdaterThread(masterServices.getConfiguration());
+failedOpenUpdaterThread.start();
+masterServices.getServerManager().registerListener(failedOpenUpdaterThread);
   }
 
   static RSGroupInfoManager getInstance(MasterServices master) throws IOException {
@@ -564,6 +570,26 @@ class RSGroupInfoManagerImpl implements RSGroupInfoManager {
 flushConfig(newGroupMap);
   }
 
+  // Called by FailedOpenUpdaterThread
+  private void updateFailedAssignments() {
+// Kick all regions in FAILED_OPEN state
+    List<RegionInfo> stuckAssignments = Lists.newArrayList();
+    for (RegionStateNode state:
+        masterServices.getAssignmentManager().getRegionStates().getRegionsInTransition()) {
+      if (state.isStuck()) {
+        stuckAssignments.add(state.getRegionInfo());
+      }
+    }
+for (RegionInfo region: stuckAssignments) {
+  LOG.info("Retrying assignment of " + region);
+   
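
Across these patches the mechanism has one shape: a daemon thread registered as a master ServerListener is signaled when a server joins, waits the configured interval so the newcomer can settle, then retries every region stuck in FAILED_OPEN in one pass. A self-contained sketch of that wait-then-kick loop, with stand-in types for the listener callback and the retry action (not HBase's actual classes):

import java.util.concurrent.TimeUnit;

// Sketch of the FailedOpenUpdater pattern: coalesce "server joined" signals,
// wait a settle interval, then retry stuck assignments once.
class FailedOpenRetrier extends Thread {
  private final long waitMillis;     // e.g. the hbase.rsgroup.reassign.wait value
  private final Runnable retryStuck; // stand-in for updateFailedAssignments()
  private boolean signaled = false;

  FailedOpenRetrier(long waitMillis, Runnable retryStuck) {
    this.waitMillis = waitMillis;
    this.retryStuck = retryStuck;
    setDaemon(true);
  }

  // Driven by ServerListener.serverAdded() in the real code.
  synchronized void serverChanged() {
    signaled = true;
    notifyAll();
  }

  @Override
  public void run() {
    while (!isInterrupted()) {
      synchronized (this) {
        while (!signaled) {
          try {
            wait();
          } catch (InterruptedException e) {
            return;
          }
        }
        signaled = false;
      }
      try {
        TimeUnit.MILLISECONDS.sleep(waitMillis); // let the joining server settle
      } catch (InterruptedException e) {
        return;
      }
      retryStuck.run(); // kick all regions stuck in FAILED_OPEN
    }
  }
}

Signals that arrive while the thread is sleeping simply re-set the flag, so a burst of rejoining servers triggers at most one follow-up pass rather than one per server.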

[4/4] hbase git commit: HBASE-19144 [RSgroups] Retry assignments in FAILED_OPEN state when servers (re)join the cluster

2017-11-03 Thread apurtell
HBASE-19144 [RSgroups] Retry assignments in FAILED_OPEN state when servers (re)join the cluster


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/28c7315e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/28c7315e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/28c7315e

Branch: refs/heads/branch-1.4
Commit: 28c7315e0b1db7e7fd30ba996b8735ac2d805756
Parents: 7a9e1dd
Author: Andrew Purtell 
Authored: Fri Nov 3 15:03:08 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Nov 3 15:08:46 2017 -0700

--
 .../hbase/rsgroup/RSGroupInfoManager.java   |  4 +
 .../hbase/rsgroup/RSGroupInfoManagerImpl.java   | 90 ++--
 2 files changed, 89 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/28c7315e/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
--
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
index ab423e9..2330605 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
@@ -36,6 +36,10 @@ import org.apache.hadoop.hbase.net.Address;
  */
 @InterfaceAudience.Private
 public interface RSGroupInfoManager {
+
+  String REASSIGN_WAIT_INTERVAL_KEY = "hbase.rsgroup.reassign.wait";
+  long DEFAULT_REASSIGN_WAIT_INTERVAL = 30 * 1000L;
+
   //Assigned before user tables
   TableName RSGROUP_TABLE_NAME =
   TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "rsgroup");

http://git-wip-us.apache.org/repos/asf/hbase/blob/28c7315e/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
--
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
index 80eaefb..cfaa632 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
@@ -66,6 +66,7 @@ import org.apache.hadoop.hbase.constraint.ConstraintException;
 import org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.master.MasterServices;
+import org.apache.hadoop.hbase.master.RegionState;
 import org.apache.hadoop.hbase.master.ServerListener;
 import org.apache.hadoop.hbase.master.procedure.CreateTableProcedure;
 import org.apache.hadoop.hbase.master.procedure.ProcedurePrepareLatch;
@@ -81,6 +82,7 @@ import org.apache.hadoop.hbase.regionserver.DisabledRegionSplitPolicy;
 import org.apache.hadoop.hbase.security.access.AccessControlLists;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ModifyRegionUtils;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.zookeeper.KeeperException;
@@ -119,6 +121,7 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListene
   private volatile Set<String> prevRSGroups;
   private RSGroupSerDe rsGroupSerDe;
   private DefaultServerUpdater defaultServerUpdater;
+  private FailedOpenUpdater failedOpenUpdater;
   private boolean isInit = false;
 
   public RSGroupInfoManagerImpl(MasterServices master) throws IOException {
@@ -136,8 +139,10 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListene
 refresh();
 rsGroupStartupWorker.start();
 defaultServerUpdater = new DefaultServerUpdater(this);
+Threads.setDaemonThreadRunning(defaultServerUpdater);
+failedOpenUpdater = new FailedOpenUpdater(this);
+Threads.setDaemonThreadRunning(failedOpenUpdater);
 master.getServerManager().registerListener(this);
-defaultServerUpdater.start();
 isInit = true;
   }
 
@@ -493,6 +498,7 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListene
   @Override
   public void serverAdded(ServerName serverName) {
 defaultServerUpdater.serverChanged();
+failedOpenUpdater.serverChanged();
   }
 
   @Override
@@ -503,18 +509,22 @@ public class RSGroupInfoManagerImpl implements RSGroupInfoManager, ServerListener
  private static class DefaultServerUpdater extends Thread {
    private static final Log LOG = LogFactory.getLog(DefaultServerUpdater.class);
 private RSGroupInfoManagerImpl mg
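
The archived diff is cut off above. From the visible lines, FailedOpenUpdater mirrors DefaultServerUpdater: a daemon thread (note the Threads.setDaemonThreadRunning calls in the constructor hunk) that is poked by serverAdded() and then retries stuck assignments after the configured wait. A self-contained sketch of that wait/notify pattern follows; the class and method bodies are assumptions standing in for the truncated code, not the committed implementation:

    // Sketch only: the daemon-updater pattern used by FailedOpenUpdater.
    public class FailedOpenUpdaterSketch extends Thread {
      private final long waitInterval; // hbase.rsgroup.reassign.wait
      private boolean hasChanged = false;

      public FailedOpenUpdaterSketch(long waitInterval) {
        this.waitInterval = waitInterval;
        setDaemon(true);
      }

      @Override
      public void run() {
        while (!isInterrupted()) {
          try {
            synchronized (this) {
              while (!hasChanged) {
                wait(); // woken by serverChanged()
              }
              hasChanged = false;
            }
            // Give the newly joined server a moment to settle, then retry.
            Thread.sleep(waitInterval);
            retryFailedAssignments();
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return;
          }
        }
      }

      // Invoked from ServerListener.serverAdded(), as in the diff above.
      public synchronized void serverChanged() {
        hasChanged = true;
        notifyAll();
      }

      private void retryFailedAssignments() {
        // Stand-in for kicking regions out of FAILED_OPEN state.
      }
    }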

[1/4] hbase git commit: HBASE-19144 [RSgroups] Retry assignments in FAILED_OPEN state when servers (re)join the cluster

2017-11-03 Thread apurtell
Repository: hbase
Updated Branches:
  refs/heads/branch-1 e61f6ff0e -> 1e227acd6
  refs/heads/branch-1.4 7a9e1dd1b -> 28c7315e0
  refs/heads/branch-2 b9b0f15cd -> 5df965158
  refs/heads/master 3e4b86d4d -> 125f3eace


HBASE-19144 [RSgroups] Retry assignments in FAILED_OPEN state when servers (re)join the cluster


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/125f3eac
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/125f3eac
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/125f3eac

Branch: refs/heads/master
Commit: 125f3eace9b35e7947721bba5175ca5dc48921e8
Parents: 3e4b86d
Author: Andrew Purtell 
Authored: Fri Nov 3 15:03:27 2017 -0700
Committer: Andrew Purtell 
Committed: Fri Nov 3 15:05:01 2017 -0700

--
 .../hbase/rsgroup/RSGroupInfoManager.java   |  4 +
 .../hbase/rsgroup/RSGroupInfoManagerImpl.java   | 89 +++-
 2 files changed, 92 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/125f3eac/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
--
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
index c8fee44..3fb40da 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManager.java
@@ -36,6 +36,10 @@ import org.apache.hadoop.hbase.net.Address;
  */
 @InterfaceAudience.Private
 public interface RSGroupInfoManager {
+
+  String REASSIGN_WAIT_INTERVAL_KEY = "hbase.rsgroup.reassign.wait";
+  long DEFAULT_REASSIGN_WAIT_INTERVAL = 30 * 1000L;
+
   //Assigned before user tables
   TableName RSGROUP_TABLE_NAME =
  TableName.valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "rsgroup");

http://git-wip-us.apache.org/repos/asf/hbase/blob/125f3eac/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
--
diff --git a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
index 9520f5f..7cf04c7 100644
--- a/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
+++ b/hbase-rsgroup/src/main/java/org/apache/hadoop/hbase/rsgroup/RSGroupInfoManagerImpl.java
@@ -36,6 +36,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.Coprocessor;
@@ -65,6 +66,7 @@ import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.master.ServerListener;
 import org.apache.hadoop.hbase.master.TableStateManager;
+import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
 import org.apache.hadoop.hbase.net.Address;
 import org.apache.hadoop.hbase.procedure2.Procedure;
 import org.apache.hadoop.hbase.protobuf.ProtobufMagic;
@@ -144,6 +146,7 @@ class RSGroupInfoManagerImpl implements RSGroupInfoManager {
  private Set<String> prevRSGroups = new HashSet<>();
   private final ServerEventsListenerThread serverEventsListenerThread =
   new ServerEventsListenerThread();
+  private FailedOpenUpdaterThread failedOpenUpdaterThread;
 
  private RSGroupInfoManagerImpl(MasterServices masterServices) throws IOException {
 this.masterServices = masterServices;
@@ -156,6 +159,9 @@ class RSGroupInfoManagerImpl implements RSGroupInfoManager {
 rsGroupStartupWorker.start();
 serverEventsListenerThread.start();
 
masterServices.getServerManager().registerListener(serverEventsListenerThread);
+failedOpenUpdaterThread = new FailedOpenUpdaterThread(masterServices.getConfiguration());
+failedOpenUpdaterThread.start();
+masterServices.getServerManager().registerListener(failedOpenUpdaterThread);
   }
 
  static RSGroupInfoManager getInstance(MasterServices master) throws IOException {
@@ -564,6 +570,26 @@ class RSGroupInfoManagerImpl implements RSGroupInfoManager {
 flushConfig(newGroupMap);
   }
 
+  // Called by FailedOpenUpdaterThread
+  private void updateFailedAssignments() {
+// Kick all regions in FAILED_OPEN state
+List stuckAssignments = Lists.newArrayList();
+for (RegionStateNode state:
+    masterServices.getAssignmentManager().getRegionStates().getRegionsInTransit
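
The archive truncates the method here. Judging from the visible lines, updateFailedAssignments() filters everything currently in transition down to the regions stuck in FAILED_OPEN, then kicks each one so it can be placed on a live server in its group. A self-contained sketch of that shape, with illustrative types in place of the HBase internals past the cut:

    import java.util.ArrayList;
    import java.util.List;

    public class UpdateFailedAssignmentsSketch {
      enum State { OPEN, OPENING, FAILED_OPEN }

      static class Region {
        final String name;
        final State state;
        Region(String name, State state) { this.name = name; this.state = state; }
      }

      // Analogue of updateFailedAssignments(): collect FAILED_OPEN regions
      // from the in-transition set, then retry each of them.
      static List<Region> kickFailedOpen(List<Region> regionsInTransition) {
        List<Region> stuck = new ArrayList<>();
        for (Region r : regionsInTransition) {
          if (r.state == State.FAILED_OPEN) {
            stuck.add(r);
          }
        }
        for (Region r : stuck) {
          System.out.println("Retrying assignment of " + r.name);
          // In HBase this would go through the AssignmentManager.
        }
        return stuck;
      }
    }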

[6/7] hbase git commit: HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

2017-11-03 Thread busbey
HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/62b491ef
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/62b491ef
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/62b491ef

Branch: refs/heads/branch-1.2
Commit: 62b491ef5773d76a8b19e68e7045930f4410a321
Parents: cdeb377
Author: Sean Busbey 
Authored: Thu Nov 2 15:45:20 2017 -0500
Committer: Sean Busbey 
Committed: Fri Nov 3 15:33:37 2017 -0500

--
 dev-support/Jenkinsfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/62b491ef/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index dfac5b3..48e2249 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -33,7 +33,7 @@ pipeline {
 TOOLS = "${env.WORKSPACE}/tools"
 // where we check out to across stages
 BASEDIR = "${env.WORKSPACE}/component"
-YETUS_RELEASE = '0.5.0'
+YETUS_RELEASE = '0.6.0'
 // where we'll write everything from different steps.
 OUTPUT_RELATIVE_GENERAL = 'output-general'
 OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"



[7/7] hbase git commit: HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

2017-11-03 Thread busbey
HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/51f06c74
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/51f06c74
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/51f06c74

Branch: refs/heads/branch-1.1
Commit: 51f06c742cb1c26b0fc345a1130b3849980a5c0c
Parents: d3e3d75
Author: Sean Busbey 
Authored: Thu Nov 2 15:45:20 2017 -0500
Committer: Sean Busbey 
Committed: Fri Nov 3 15:33:42 2017 -0500

--
 dev-support/Jenkinsfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/51f06c74/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index dfac5b3..48e2249 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -33,7 +33,7 @@ pipeline {
 TOOLS = "${env.WORKSPACE}/tools"
 // where we check out to across stages
 BASEDIR = "${env.WORKSPACE}/component"
-YETUS_RELEASE = '0.5.0'
+YETUS_RELEASE = '0.6.0'
 // where we'll write everything from different steps.
 OUTPUT_RELATIVE_GENERAL = 'output-general'
 OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"



[5/7] hbase git commit: HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

2017-11-03 Thread busbey
HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3a880049
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3a880049
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3a880049

Branch: refs/heads/branch-1.3
Commit: 3a8800493fb01cb0de84eb4b9ca08c8ba2bc0f74
Parents: 371e609
Author: Sean Busbey 
Authored: Thu Nov 2 15:45:20 2017 -0500
Committer: Sean Busbey 
Committed: Fri Nov 3 15:33:33 2017 -0500

--
 dev-support/Jenkinsfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3a880049/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index dfac5b3..48e2249 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -33,7 +33,7 @@ pipeline {
 TOOLS = "${env.WORKSPACE}/tools"
 // where we check out to across stages
 BASEDIR = "${env.WORKSPACE}/component"
-YETUS_RELEASE = '0.5.0'
+YETUS_RELEASE = '0.6.0'
 // where we'll write everything from different steps.
 OUTPUT_RELATIVE_GENERAL = 'output-general'
 OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"



[4/7] hbase git commit: HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

2017-11-03 Thread busbey
HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7a9e1dd1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7a9e1dd1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7a9e1dd1

Branch: refs/heads/branch-1.4
Commit: 7a9e1dd1b8a4545a813c4b64bddebcacc4db9b64
Parents: f6be6e0
Author: Sean Busbey 
Authored: Thu Nov 2 15:45:20 2017 -0500
Committer: Sean Busbey 
Committed: Fri Nov 3 15:33:28 2017 -0500

--
 dev-support/Jenkinsfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7a9e1dd1/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index dfac5b3..48e2249 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -33,7 +33,7 @@ pipeline {
 TOOLS = "${env.WORKSPACE}/tools"
 // where we check out to across stages
 BASEDIR = "${env.WORKSPACE}/component"
-YETUS_RELEASE = '0.5.0'
+YETUS_RELEASE = '0.6.0'
 // where we'll write everything from different steps.
 OUTPUT_RELATIVE_GENERAL = 'output-general'
 OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"



[3/7] hbase git commit: HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

2017-11-03 Thread busbey
HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e61f6ff0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e61f6ff0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e61f6ff0

Branch: refs/heads/branch-1
Commit: e61f6ff0ecf4fcd07ca20abcbfd5ec6f6fc60f52
Parents: 9ad6e042
Author: Sean Busbey 
Authored: Thu Nov 2 15:45:20 2017 -0500
Committer: Sean Busbey 
Committed: Fri Nov 3 15:33:22 2017 -0500

--
 dev-support/Jenkinsfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e61f6ff0/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index dfac5b3..48e2249 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -33,7 +33,7 @@ pipeline {
 TOOLS = "${env.WORKSPACE}/tools"
 // where we check out to across stages
 BASEDIR = "${env.WORKSPACE}/component"
-YETUS_RELEASE = '0.5.0'
+YETUS_RELEASE = '0.6.0'
 // where we'll write everything from different steps.
 OUTPUT_RELATIVE_GENERAL = 'output-general'
 OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"



[1/7] hbase git commit: HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

2017-11-03 Thread busbey
Repository: hbase
Updated Branches:
  refs/heads/branch-1 9ad6e0422 -> e61f6ff0e
  refs/heads/branch-1.1 d3e3d750d -> 51f06c742
  refs/heads/branch-1.2 cdeb37714 -> 62b491ef5
  refs/heads/branch-1.3 371e6093f -> 3a8800493
  refs/heads/branch-1.4 f6be6e0d7 -> 7a9e1dd1b
  refs/heads/branch-2 0fb3bd2e3 -> b9b0f15cd
  refs/heads/master 4985e2730 -> 3e4b86d4d


HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3e4b86d4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3e4b86d4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3e4b86d4

Branch: refs/heads/master
Commit: 3e4b86d4d6b3608e22abb1fe8db07073eb486da8
Parents: 4985e27
Author: Sean Busbey 
Authored: Thu Nov 2 15:45:20 2017 -0500
Committer: Sean Busbey 
Committed: Fri Nov 3 15:32:35 2017 -0500

--
 dev-support/Jenkinsfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3e4b86d4/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 5bd41aa..f7ae546 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -33,7 +33,7 @@ pipeline {
 TOOLS = "${env.WORKSPACE}/tools"
 // where we check out to across stages
 BASEDIR = "${env.WORKSPACE}/component"
-YETUS_RELEASE = '0.5.0'
+YETUS_RELEASE = '0.6.0'
 // where we'll write everything from different steps.
 OUTPUT_RELATIVE_GENERAL = 'output-general'
 OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"



[2/7] hbase git commit: HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

2017-11-03 Thread busbey
HBASE-19097 update nightly tests to use Apache Yetus 0.6.0

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b9b0f15c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b9b0f15c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b9b0f15c

Branch: refs/heads/branch-2
Commit: b9b0f15cdbad14620eebf5769c0a3b664ab5afe8
Parents: 0fb3bd2
Author: Sean Busbey 
Authored: Thu Nov 2 15:45:20 2017 -0500
Committer: Sean Busbey 
Committed: Fri Nov 3 15:33:11 2017 -0500

--
 dev-support/Jenkinsfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b9b0f15c/dev-support/Jenkinsfile
--
diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 5bd41aa..f7ae546 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -33,7 +33,7 @@ pipeline {
 TOOLS = "${env.WORKSPACE}/tools"
 // where we check out to across stages
 BASEDIR = "${env.WORKSPACE}/component"
-YETUS_RELEASE = '0.5.0'
+YETUS_RELEASE = '0.6.0'
 // where we'll write everything from different steps.
 OUTPUT_RELATIVE_GENERAL = 'output-general'
 OUTPUTDIR_GENERAL = "${env.WORKSPACE}/output-general"



hbase git commit: Revert "HBASE-19102 TestZooKeeperMainServer fails with KeeperException"

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1 654712878 -> 9ad6e0422


Revert "HBASE-19102 TestZooKeeperMainServer fails with KeeperException"

This reverts commit 65471287894e970d2cbf9d19959f4f8f52a49549.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9ad6e042
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9ad6e042
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9ad6e042

Branch: refs/heads/branch-1
Commit: 9ad6e0422fe4462f3b9450bd65bb108b636f1a37
Parents: 6547128
Author: Michael Stack 
Authored: Fri Nov 3 12:16:32 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 12:16:32 2017 -0700

--
 .../hadoop/hbase/zookeeper/ZooKeeperMainServer.java| 13 -
 1 file changed, 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9ad6e042/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
index 5415fd6..e81da59 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
@@ -20,9 +20,7 @@
 package org.apache.hadoop.hbase.zookeeper;
 
 import java.io.IOException;
-import java.util.concurrent.TimeUnit;
 
-import org.apache.curator.shaded.com.google.common.base.Stopwatch;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -50,15 +48,6 @@ public class ZooKeeperMainServer {
 public HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(String[] args)
 throws IOException, InterruptedException {
   super(args);
-  // Make sure we are connected before we proceed. Can take a while on some systems. If we
-  // run the command without being connected, we get ConnectionLoss KeeperErrorConnection...
-  Stopwatch stopWatch = Stopwatch.createStarted();
-  while (!this.zk.getState().isConnected()) {
-Thread.sleep(1);
-if (stopWatch.elapsed(TimeUnit.SECONDS) > 10) {
-  throw new InterruptedException("Failed connect " + this.zk);
-}
-  }
 }
 
 /**
@@ -111,8 +100,6 @@ public class ZooKeeperMainServer {
   }
 }
 // If command-line arguments, run our hack so they are executed.
-// ZOOKEEPER-1897 was committed to zookeeper-3.4.6 but elsewhere in this class we say
-// 3.4.6 breaks command-processing; TODO.
 if (hasCommandLineArguments(args)) {
   HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain zkm =
 new HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(newArgs);



hbase git commit: Revert "HBASE-19102 TestZooKeeperMainServer fails with KeeperException"

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 9f7df19df -> f6be6e0d7


Revert "HBASE-19102 TestZooKeeperMainServer fails with KeeperException"

This reverts commit 9f7df19dfaff4df275d45e0ba9d0ffcd45612483.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f6be6e0d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f6be6e0d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f6be6e0d

Branch: refs/heads/branch-1.4
Commit: f6be6e0d7b6eb552ba4ee8da61b6cdc4f9d7bb8a
Parents: 9f7df19
Author: Michael Stack 
Authored: Fri Nov 3 12:16:08 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 12:16:08 2017 -0700

--
 .../hadoop/hbase/zookeeper/ZooKeeperMainServer.java| 13 -
 1 file changed, 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f6be6e0d/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
index 5415fd6..e81da59 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
@@ -20,9 +20,7 @@
 package org.apache.hadoop.hbase.zookeeper;
 
 import java.io.IOException;
-import java.util.concurrent.TimeUnit;
 
-import org.apache.curator.shaded.com.google.common.base.Stopwatch;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -50,15 +48,6 @@ public class ZooKeeperMainServer {
 public HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(String[] args)
 throws IOException, InterruptedException {
   super(args);
-  // Make sure we are connected before we proceed. Can take a while on some systems. If we
-  // run the command without being connected, we get ConnectionLoss KeeperErrorConnection...
-  Stopwatch stopWatch = Stopwatch.createStarted();
-  while (!this.zk.getState().isConnected()) {
-Thread.sleep(1);
-if (stopWatch.elapsed(TimeUnit.SECONDS) > 10) {
-  throw new InterruptedException("Failed connect " + this.zk);
-}
-  }
 }
 
 /**
@@ -111,8 +100,6 @@ public class ZooKeeperMainServer {
   }
 }
 // If command-line arguments, run our hack so they are executed.
-// ZOOKEEPER-1897 was committed to zookeeper-3.4.6 but elsewhere in this class we say
-// 3.4.6 breaks command-processing; TODO.
 if (hasCommandLineArguments(args)) {
   HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain zkm =
 new HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(newArgs);



hbase git commit: Revert "HBASE-19102 TestZooKeeperMainServer fails with KeeperException"

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 b89a40c99 -> cdeb37714


Revert "HBASE-19102 TestZooKeeperMainServer fails with KeeperException"

This reverts commit b89a40c990580402f70e427eec37194166885fab.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cdeb3771
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cdeb3771
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cdeb3771

Branch: refs/heads/branch-1.2
Commit: cdeb377148cd5adc9b746022fcfe7c75d4d58635
Parents: b89a40c
Author: Michael Stack 
Authored: Fri Nov 3 12:15:14 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 12:15:14 2017 -0700

--
 .../hadoop/hbase/zookeeper/ZooKeeperMainServer.java| 13 -
 1 file changed, 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cdeb3771/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
index 5415fd6..e81da59 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
@@ -20,9 +20,7 @@
 package org.apache.hadoop.hbase.zookeeper;
 
 import java.io.IOException;
-import java.util.concurrent.TimeUnit;
 
-import org.apache.curator.shaded.com.google.common.base.Stopwatch;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -50,15 +48,6 @@ public class ZooKeeperMainServer {
 public HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(String[] args)
 throws IOException, InterruptedException {
   super(args);
-  // Make sure we are connected before we proceed. Can take a while on some systems. If we
-  // run the command without being connected, we get ConnectionLoss KeeperErrorConnection...
-  Stopwatch stopWatch = Stopwatch.createStarted();
-  while (!this.zk.getState().isConnected()) {
-Thread.sleep(1);
-if (stopWatch.elapsed(TimeUnit.SECONDS) > 10) {
-  throw new InterruptedException("Failed connect " + this.zk);
-}
-  }
 }
 
 /**
@@ -111,8 +100,6 @@ public class ZooKeeperMainServer {
   }
 }
 // If command-line arguments, run our hack so they are executed.
-// ZOOKEEPER-1897 was committed to zookeeper-3.4.6 but elsewhere in this class we say
-// 3.4.6 breaks command-processing; TODO.
 if (hasCommandLineArguments(args)) {
   HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain zkm =
 new HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(newArgs);



hbase git commit: Revert "HBASE-19102 TestZooKeeperMainServer fails with KeeperException"

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 1937a93c1 -> 371e6093f


Revert "HBASE-19102 TestZooKeeperMainServer fails with KeeperException"

This reverts commit 1937a93c1be5e4dccb0fb193bd7386c9100595db.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/371e6093
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/371e6093
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/371e6093

Branch: refs/heads/branch-1.3
Commit: 371e6093f169085fe5cbaf9b7477ba86b9c44f4e
Parents: 1937a93
Author: Michael Stack 
Authored: Fri Nov 3 12:15:42 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 12:15:42 2017 -0700

--
 .../hadoop/hbase/zookeeper/ZooKeeperMainServer.java| 13 -
 1 file changed, 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/371e6093/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
index 5415fd6..e81da59 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
@@ -20,9 +20,7 @@
 package org.apache.hadoop.hbase.zookeeper;
 
 import java.io.IOException;
-import java.util.concurrent.TimeUnit;
 
-import org.apache.curator.shaded.com.google.common.base.Stopwatch;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -50,15 +48,6 @@ public class ZooKeeperMainServer {
 public HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(String[] args)
 throws IOException, InterruptedException {
   super(args);
-  // Make sure we are connected before we proceed. Can take a while on some systems. If we
-  // run the command without being connected, we get ConnectionLoss KeeperErrorConnection...
-  Stopwatch stopWatch = Stopwatch.createStarted();
-  while (!this.zk.getState().isConnected()) {
-Thread.sleep(1);
-if (stopWatch.elapsed(TimeUnit.SECONDS) > 10) {
-  throw new InterruptedException("Failed connect " + this.zk);
-}
-  }
 }
 
 /**
@@ -111,8 +100,6 @@ public class ZooKeeperMainServer {
   }
 }
 // If command-line arguments, run our hack so they are executed.
-// ZOOKEEPER-1897 was committed to zookeeper-3.4.6 but elsewhere in this class we say
-// 3.4.6 breaks command-processing; TODO.
 if (hasCommandLineArguments(args)) {
   HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain zkm =
 new HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(newArgs);



hbase git commit: HBASE-19102 TestZooKeeperMainServer fails with KeeperException

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 b31349243 -> b89a40c99


HBASE-19102 TestZooKeeperMainServer fails with KeeperException

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/b89a40c9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/b89a40c9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/b89a40c9

Branch: refs/heads/branch-1.2
Commit: b89a40c990580402f70e427eec37194166885fab
Parents: b313492
Author: Michael Stack 
Authored: Thu Oct 26 21:05:04 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 11:50:25 2017 -0700

--
 .../hadoop/hbase/zookeeper/ZooKeeperMainServer.java| 13 +
 1 file changed, 13 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/b89a40c9/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
index e81da59..5415fd6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
@@ -20,7 +20,9 @@
 package org.apache.hadoop.hbase.zookeeper;
 
 import java.io.IOException;
+import java.util.concurrent.TimeUnit;
 
+import org.apache.curator.shaded.com.google.common.base.Stopwatch;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -48,6 +50,15 @@ public class ZooKeeperMainServer {
 public HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(String[] args)
 throws IOException, InterruptedException {
   super(args);
+  // Make sure we are connected before we proceed. Can take a while on some systems. If we
+  // run the command without being connected, we get ConnectionLoss KeeperErrorConnection...
+  Stopwatch stopWatch = Stopwatch.createStarted();
+  while (!this.zk.getState().isConnected()) {
+Thread.sleep(1);
+if (stopWatch.elapsed(TimeUnit.SECONDS) > 10) {
+  throw new InterruptedException("Failed connect " + this.zk);
+}
+  }
 }
 
 /**
@@ -100,6 +111,8 @@ public class ZooKeeperMainServer {
   }
 }
 // If command-line arguments, run our hack so they are executed.
+// ZOOKEEPER-1897 was committed to zookeeper-3.4.6 but elsewhere in this class we say
+// 3.4.6 breaks command-processing; TODO.
 if (hasCommandLineArguments(args)) {
   HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain zkm =
 new HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(newArgs);



hbase git commit: HBASE-19102 TestZooKeeperMainServer fails with KeeperException

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 71c5794e9 -> 1937a93c1


HBASE-19102 TestZooKeeperMainServer fails with KeeperException

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1937a93c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1937a93c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1937a93c

Branch: refs/heads/branch-1.3
Commit: 1937a93c1be5e4dccb0fb193bd7386c9100595db
Parents: 71c5794
Author: Michael Stack 
Authored: Thu Oct 26 21:05:04 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 11:50:04 2017 -0700

--
 .../hadoop/hbase/zookeeper/ZooKeeperMainServer.java| 13 +
 1 file changed, 13 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1937a93c/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
index e81da59..5415fd6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
@@ -20,7 +20,9 @@
 package org.apache.hadoop.hbase.zookeeper;
 
 import java.io.IOException;
+import java.util.concurrent.TimeUnit;
 
+import org.apache.curator.shaded.com.google.common.base.Stopwatch;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -48,6 +50,15 @@ public class ZooKeeperMainServer {
 public HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(String[] args)
 throws IOException, InterruptedException {
   super(args);
+  // Make sure we are connected before we proceed. Can take a while on some systems. If we
+  // run the command without being connected, we get ConnectionLoss KeeperErrorConnection...
+  Stopwatch stopWatch = Stopwatch.createStarted();
+  while (!this.zk.getState().isConnected()) {
+Thread.sleep(1);
+if (stopWatch.elapsed(TimeUnit.SECONDS) > 10) {
+  throw new InterruptedException("Failed connect " + this.zk);
+}
+  }
 }
 
 /**
@@ -100,6 +111,8 @@ public class ZooKeeperMainServer {
   }
 }
 // If command-line arguments, run our hack so they are executed.
+// ZOOKEEPER-1897 was committed to zookeeper-3.4.6 but elsewhere in this class we say
+// 3.4.6 breaks command-processing; TODO.
 if (hasCommandLineArguments(args)) {
   HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain zkm =
 new HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(newArgs);



hbase git commit: HBASE-19102 TestZooKeeperMainServer fails with KeeperException

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 cd05007da -> 9f7df19df


HBASE-19102 TestZooKeeperMainServer fails with KeeperException

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9f7df19d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9f7df19d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9f7df19d

Branch: refs/heads/branch-1.4
Commit: 9f7df19dfaff4df275d45e0ba9d0ffcd45612483
Parents: cd05007
Author: Michael Stack 
Authored: Thu Oct 26 21:05:04 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 11:49:47 2017 -0700

--
 .../hadoop/hbase/zookeeper/ZooKeeperMainServer.java| 13 +
 1 file changed, 13 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9f7df19d/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
index e81da59..5415fd6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
@@ -20,7 +20,9 @@
 package org.apache.hadoop.hbase.zookeeper;
 
 import java.io.IOException;
+import java.util.concurrent.TimeUnit;
 
+import org.apache.curator.shaded.com.google.common.base.Stopwatch;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -48,6 +50,15 @@ public class ZooKeeperMainServer {
 public HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(String[] args)
 throws IOException, InterruptedException {
   super(args);
+  // Make sure we are connected before we proceed. Can take a while on some systems. If we
+  // run the command without being connected, we get ConnectionLoss KeeperErrorConnection...
+  Stopwatch stopWatch = Stopwatch.createStarted();
+  while (!this.zk.getState().isConnected()) {
+Thread.sleep(1);
+if (stopWatch.elapsed(TimeUnit.SECONDS) > 10) {
+  throw new InterruptedException("Failed connect " + this.zk);
+}
+  }
 }
 
 /**
@@ -100,6 +111,8 @@ public class ZooKeeperMainServer {
   }
 }
 // If command-line arguments, run our hack so they are executed.
+// ZOOKEEPER-1897 was committed to zookeeper-3.4.6 but elsewhere in this class we say
+// 3.4.6 breaks command-processing; TODO.
 if (hasCommandLineArguments(args)) {
   HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain zkm =
 new HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(newArgs);



hbase git commit: HBASE-19102 TestZooKeeperMainServer fails with KeeperException

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-1 27d8a6d12 -> 654712878


HBASE-19102 TestZooKeeperMainServer fails with KeeperException

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/65471287
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/65471287
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/65471287

Branch: refs/heads/branch-1
Commit: 65471287894e970d2cbf9d19959f4f8f52a49549
Parents: 27d8a6d
Author: Michael Stack 
Authored: Thu Oct 26 21:05:04 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 11:49:18 2017 -0700

--
 .../hadoop/hbase/zookeeper/ZooKeeperMainServer.java| 13 +
 1 file changed, 13 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/65471287/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
index e81da59..5415fd6 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
@@ -20,7 +20,9 @@
 package org.apache.hadoop.hbase.zookeeper;
 
 import java.io.IOException;
+import java.util.concurrent.TimeUnit;
 
+import org.apache.curator.shaded.com.google.common.base.Stopwatch;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -48,6 +50,15 @@ public class ZooKeeperMainServer {
 public HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(String[] args)
 throws IOException, InterruptedException {
   super(args);
+  // Make sure we are connected before we proceed. Can take a while on some systems. If we
+  // run the command without being connected, we get ConnectionLoss KeeperErrorConnection...
+  Stopwatch stopWatch = Stopwatch.createStarted();
+  while (!this.zk.getState().isConnected()) {
+Thread.sleep(1);
+if (stopWatch.elapsed(TimeUnit.SECONDS) > 10) {
+  throw new InterruptedException("Failed connect " + this.zk);
+}
+  }
 }
 
 /**
@@ -100,6 +111,8 @@ public class ZooKeeperMainServer {
   }
 }
 // If command-line arguments, run our hack so they are executed.
+// ZOOKEEPER-1897 was committed to zookeeper-3.4.6 but elsewhere in this class we say
+// 3.4.6 breaks command-processing; TODO.
 if (hasCommandLineArguments(args)) {
   HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain zkm =
 new HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(newArgs);



hbase git commit: HBASE-19102 TestZooKeeperMainServer fails with KeeperException

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 9dc9d0394 -> 0fb3bd2e3


HBASE-19102 TestZooKeeperMainServer fails with KeeperException

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0fb3bd2e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0fb3bd2e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0fb3bd2e

Branch: refs/heads/branch-2
Commit: 0fb3bd2e3c3b5bcce2f1c374bd07e4811ebfae0b
Parents: 9dc9d03
Author: Michael Stack 
Authored: Thu Oct 26 21:05:04 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 11:48:53 2017 -0700

--
 .../hadoop/hbase/zookeeper/ZooKeeperMainServer.java| 13 +
 1 file changed, 13 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0fb3bd2e/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
index cf76cbb..20a2d6d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
@@ -20,7 +20,9 @@
 package org.apache.hadoop.hbase.zookeeper;
 
 import java.io.IOException;
+import java.util.concurrent.TimeUnit;
 
+import org.apache.curator.shaded.com.google.common.base.Stopwatch;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -48,6 +50,15 @@ public class ZooKeeperMainServer {
 public HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(String[] args)
 throws IOException, InterruptedException {
   super(args);
+  // Make sure we are connected before we proceed. Can take a while on some systems. If we
+  // run the command without being connected, we get ConnectionLoss KeeperErrorConnection...
+  Stopwatch stopWatch = Stopwatch.createStarted();
+  while (!this.zk.getState().isConnected()) {
+Thread.sleep(1);
+if (stopWatch.elapsed(TimeUnit.SECONDS) > 10) {
+  throw new InterruptedException("Failed connect " + this.zk);
+}
+  }
 }
 
 /**
@@ -100,6 +111,8 @@ public class ZooKeeperMainServer {
   }
 }
 // If command-line arguments, run our hack so they are executed.
+// ZOOKEEPER-1897 was committed to zookeeper-3.4.6 but elsewhere in this class we say
+// 3.4.6 breaks command-processing; TODO.
 if (hasCommandLineArguments(args)) {
   HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain zkm =
 new HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(newArgs);



hbase git commit: HBASE-19102 TestZooKeeperMainServer fails with KeeperException

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 4857cbd76 -> 4985e2730


HBASE-19102 TestZooKeeperMainServer fails with KeeperException

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4985e273
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4985e273
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4985e273

Branch: refs/heads/master
Commit: 4985e273079b1dfe95db0befd49266ad46754b63
Parents: 4857cbd
Author: Michael Stack 
Authored: Thu Oct 26 21:05:04 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 11:48:25 2017 -0700

--
 .../hadoop/hbase/zookeeper/ZooKeeperMainServer.java| 13 +
 1 file changed, 13 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4985e273/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
--
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
index cf76cbb..20a2d6d 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/zookeeper/ZooKeeperMainServer.java
@@ -20,7 +20,9 @@
 package org.apache.hadoop.hbase.zookeeper;
 
 import java.io.IOException;
+import java.util.concurrent.TimeUnit;
 
+import org.apache.curator.shaded.com.google.common.base.Stopwatch;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
@@ -48,6 +50,15 @@ public class ZooKeeperMainServer {
 public HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(String[] args)
 throws IOException, InterruptedException {
   super(args);
+  // Make sure we are connected before we proceed. Can take a while on some systems. If we
+  // run the command without being connected, we get ConnectionLoss KeeperErrorConnection...
+  Stopwatch stopWatch = Stopwatch.createStarted();
+  while (!this.zk.getState().isConnected()) {
+Thread.sleep(1);
+if (stopWatch.elapsed(TimeUnit.SECONDS) > 10) {
+  throw new InterruptedException("Failed connect " + this.zk);
+}
+  }
 }
 
 /**
@@ -100,6 +111,8 @@ public class ZooKeeperMainServer {
   }
 }
 // If command-line arguments, run our hack so they are executed.
+// ZOOKEEPER-1897 was committed to zookeeper-3.4.6 but elsewhere in this class we say
+// 3.4.6 breaks command-processing; TODO.
 if (hasCommandLineArguments(args)) {
   HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain zkm =
 new HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain(newArgs);
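
The hack above is a bounded busy-wait: poll the client state every millisecond until the session is connected, and give up after ten seconds on the stopwatch. The same pattern, generalized and self-contained (plain java.time in place of the shaded Guava Stopwatch; the names here are illustrative):

    import java.time.Duration;
    import java.time.Instant;
    import java.util.function.BooleanSupplier;

    public final class BoundedWait {
      // Poll until the condition holds or the timeout elapses, mirroring
      // the loop in HACK_UNTIL_ZOOKEEPER_1897_ZooKeeperMain above.
      public static void await(BooleanSupplier condition, Duration timeout)
          throws InterruptedException {
        Instant deadline = Instant.now().plus(timeout);
        while (!condition.getAsBoolean()) {
          Thread.sleep(1);
          if (Instant.now().isAfter(deadline)) {
            throw new InterruptedException("Timed out after " + timeout);
          }
        }
      }
    }

Throwing InterruptedException on timeout, as the patch does, is unusual; a TimeoutException would be more conventional, but it presumably keeps the constructor's declared exceptions unchanged.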



hbase git commit: HBASE-19152 Update refguide 'how to build an RC' and the make_rc.sh script; AMENDMENT adding generation of hashes

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master 4edfa0656 -> 4857cbd76


HBASE-19152 Update refguide 'how to build an RC' and the make_rc.sh script; AMENDMENT adding generation of hashes


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4857cbd7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4857cbd7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4857cbd7

Branch: refs/heads/master
Commit: 4857cbd762265b272efee06925797badccb3ef68
Parents: 4edfa06
Author: Michael Stack 
Authored: Fri Nov 3 10:06:20 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 10:06:55 2017 -0700

--
 dev-support/make_rc.sh | 8 +---
 1 file changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4857cbd7/dev-support/make_rc.sh
--
diff --git a/dev-support/make_rc.sh b/dev-support/make_rc.sh
index 83b5821..19f906f 100755
--- a/dev-support/make_rc.sh
+++ b/dev-support/make_rc.sh
@@ -104,11 +104,13 @@ build_bin
 MAVEN_OPTS="${mvnopts}" ${mvn} deploy -DskipTests -Papache-release -Prelease \
 -Dmaven.repo.local=${output_dir}/repository
 
-echo "DONE"
+# Generate SHA512 and MD5 hashes
+cd ${output_dir}
+for i in *.tar.gz; do echo $i; gpg --print-md SHA512 $i > $i.sha ; done
+for i in *.tar.gz; do echo $i; gpg --print-md MD5 $i > $i.md5 ; done
+
 echo "Check the content of ${output_dir}.  If good, sign and push to 
dist.apache.org"
 echo " cd ${output_dir}"
-echo ' for i in *.tar.gz; do echo $i; gpg --print-md SHA512 $i > $i.sha ; done'
-echo ' for i in *.tar.gz; do echo $i; gpg --print-md MD5 $i > $i.md5 ; done'
 echo ' for i in *.tar.gz; do echo $i; gpg --armor --output $i.asc --detach-sig 
$i  ; done'
 echo ' rsync -av ${output_dir}/*.gz ${output_dir}/*.md5 ${output_dir}/*.sha 
${output_dir}/*.asc ${APACHE_HBASE_DIST_DEV_DIR}/${hbase_name}/'
 echo "Check the content deployed to maven.  If good, close the repo and record 
links of temporary staging repo"
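
gpg --print-md above only computes the digests; any SHA-512 or MD5 tool can verify them on the receiving end. For illustration only (not part of make_rc.sh), a small Java equivalent that prints a hex SHA-512 per tarball:

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.security.MessageDigest;

    public class PrintSha512 {
      public static void main(String[] args) throws Exception {
        for (String file : args) {
          MessageDigest md = MessageDigest.getInstance("SHA-512");
          try (InputStream in = Files.newInputStream(Paths.get(file))) {
            byte[] buf = new byte[8192];
            int n;
            while ((n = in.read(buf)) != -1) {
              md.update(buf, 0, n); // stream the tarball through the digest
            }
          }
          StringBuilder hex = new StringBuilder();
          for (byte b : md.digest()) {
            hex.append(String.format("%02x", b));
          }
          System.out.println(file + "  " + hex);
        }
      }
    }

Note that gpg --print-md formats its output differently (grouped, uppercase), so compare digests case-insensitively after stripping whitespace.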



hbase git commit: HBASE-19152 Update refguide 'how to build an RC' and the make_rc.sh script; AMENDMENT adding generation of hashes

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 9026d92f5 -> 9dc9d0394


HBASE-19152 Update refguide 'how to build an RC' and the make_rc.sh script; AMENDMENT adding generation of hashes


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9dc9d039
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9dc9d039
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9dc9d039

Branch: refs/heads/branch-2
Commit: 9dc9d0394d57100b32e45788d29ade2e139b80f7
Parents: 9026d92
Author: Michael Stack 
Authored: Fri Nov 3 10:06:20 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 10:06:20 2017 -0700

--
 dev-support/make_rc.sh | 8 +---
 1 file changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9dc9d039/dev-support/make_rc.sh
--
diff --git a/dev-support/make_rc.sh b/dev-support/make_rc.sh
index 83b5821..19f906f 100755
--- a/dev-support/make_rc.sh
+++ b/dev-support/make_rc.sh
@@ -104,11 +104,13 @@ build_bin
 MAVEN_OPTS="${mvnopts}" ${mvn} deploy -DskipTests -Papache-release -Prelease \
 -Dmaven.repo.local=${output_dir}/repository
 
-echo "DONE"
+# Generate SHA512 and MD5 hashes
+cd ${output_dir}
+for i in *.tar.gz; do echo $i; gpg --print-md SHA512 $i > $i.sha ; done
+for i in *.tar.gz; do echo $i; gpg --print-md MD5 $i > $i.md5 ; done
+
 echo "Check the content of ${output_dir}.  If good, sign and push to 
dist.apache.org"
 echo " cd ${output_dir}"
-echo ' for i in *.tar.gz; do echo $i; gpg --print-md SHA512 $i > $i.sha ; done'
-echo ' for i in *.tar.gz; do echo $i; gpg --print-md MD5 $i > $i.md5 ; done'
 echo ' for i in *.tar.gz; do echo $i; gpg --armor --output $i.asc --detach-sig 
$i  ; done'
 echo ' rsync -av ${output_dir}/*.gz ${output_dir}/*.md5 ${output_dir}/*.sha 
${output_dir}/*.asc ${APACHE_HBASE_DIST_DEV_DIR}/${hbase_name}/'
 echo "Check the content deployed to maven.  If good, close the repo and record 
links of temporary staging repo"



hbase git commit: HBASE-19152 Update refguide 'how to build an RC' and the make_rc.sh script

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 a9f0c5d4e -> 9026d92f5


HBASE-19152 Update refguide 'how to build an RC' and the make_rc.sh script

Removes the src.xml previously used to build the src tgz via hbase-assembly.
Use git archive instead going forward. Updates the developer release candidate
documentation and the make_rc.sh script.

Signed-off-by: Sean Busbey 
Signed-off-by: Peter Somogyi 
Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/9026d92f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/9026d92f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/9026d92f

Branch: refs/heads/branch-2
Commit: 9026d92f59fe67285eb7c91702c2634ac681f4f3
Parents: a9f0c5d
Author: Michael Stack 
Authored: Wed Nov 1 13:36:19 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 09:58:18 2017 -0700

--
 dev-support/make_rc.sh |  89 ++---
 hbase-assembly/src/main/assembly/src.xml   | 149 ---
 src/main/asciidoc/_chapters/developer.adoc | 243 +++-
 3 files changed, 209 insertions(+), 272 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/9026d92f/dev-support/make_rc.sh
--
diff --git a/dev-support/make_rc.sh b/dev-support/make_rc.sh
index b88a984..83b5821 100755
--- a/dev-support/make_rc.sh
+++ b/dev-support/make_rc.sh
@@ -28,8 +28,17 @@
 
 set -e
 
-devsupport=`dirname "$0"`
-devsupport=`cd "$devsupport">/dev/null; pwd`
+# Script checks out a tag, cleans the checkout and then builds src and bin
+# tarballs. It then deploys to the apache maven repository.
+# Presumes run from git dir.
+
+# Need a git tag to build.
+if [ "$1" = "" ]
+then
+  echo -n "Usage: $0 TAG_TO_PACKAGE"
+  exit 1
+fi
+git_tag=$1
 
 # Set mvn and mvnopts
 mvn=mvn
@@ -41,45 +50,65 @@ if [ "$MAVEN_OPTS" != "" ]; then
   mvnopts="${MAVEN_OPTS}"
 fi
 
-# Make a dir to save tgzs in.
+# Ensure we are inside a git repo before making progress
+# The below will fail if outside git.
+git -C . rev-parse
+
+# Checkout git_tag
+git checkout "${git_tag}"
+
+# Get mvn project version
+#shellcheck disable=SC2016
+version=$(${mvn} -q -N -Dexec.executable="echo" -Dexec.args='${project.version}' exec:exec)
+hbase_name="hbase-${version}"
+
+# Make a dir to save tgzs into.
 d=`date -u +"%Y%m%dT%H%M%SZ"`
-archivedir="$(pwd)/../`basename $0`.$d"
-echo "Archive dir ${archivedir}"
-mkdir -p "${archivedir}"
+output_dir="/${TMPDIR}/$hbase_name.$d"
+mkdir -p "${output_dir}"
+
 
-function tgz_mover {
-  mv ./hbase-assembly/target/hbase-*.tar.gz "${archivedir}"
+# Build src tgz.
+function build_src {
+  git archive --format=tar.gz --output="${output_dir}/${hbase_name}-src.tar.gz" --prefix="${hbase_name}/" "${git_tag}"
 }
 
-function deploy {
-  MAVEN_OPTS="${mvnopts}" ${mvn} clean install -DskipTests -Prelease \
--Dmaven.repo.local=${archivedir}/repository
-  MAVEN_OPTS="${mvnopts}" ${mvn} install -DskipTests post-site assembly:single 
-Prelease \
--Dmaven.repo.local=${archivedir}/repository
-  tgz_mover
-  MAVEN_OPTS="${mvnopts}" ${mvn} deploy -DskipTests -Papache-release -Prelease 
\
--Dmaven.repo.local=${archivedir}/repository
+# Build bin tgz
+function build_bin {
+  MAVEN_OPTS="${mvnopts}" ${mvn} clean install -DskipTests -Papache-release 
-Prelease \
+-Dmaven.repo.local=${output_dir}/repository
+  MAVEN_OPTS="${mvnopts}" ${mvn} install -DskipTests site assembly:single 
-Papache-release -Prelease \
+-Dmaven.repo.local=${output_dir}/repository
+  mv ./hbase-assembly/target/hbase-*.tar.gz "${output_dir}"
 }
 
-# Build src tarball
-# run clean separate from assembly:single because it fails to clean shaded 
modules correctly
+# Make sure all clean.
+git clean -f -x -d
 MAVEN_OPTS="${mvnopts}" ${mvn} clean
-MAVEN_OPTS="${mvnopts}" ${mvn} install -DskipTests assembly:single \
-  -Dassembly.file="$(pwd)/hbase-assembly/src/main/assembly/src.xml" \
-  -Prelease -Dmaven.repo.local=${archivedir}/repository
-
-tgz_mover
 
 # Now do the two builds,  one for hadoop1, then hadoop2
-deploy
+# Run a rat check.
+${mvn} apache-rat:check
+
+#Build src.
+build_src
+
+# Build bin product
+build_bin
+
+# Deploy to mvn repository
+# Depends on build_bin having populated the local repository
+# If the below upload fails, you will probably have to clean the partial
+# upload from repository.apache.org by 'drop'ping it from the staging
+# repository before restart.
+MAVEN_OPTS="${mvnopts}" ${mvn} deploy -DskipTests -Papache-release -Prelease \
+-Dmaven.repo.local=${output_dir}/repository
 
 echo "DONE"
-echo "Check the content of ${archivedir}.  If good, sign and push to 
dist.apache.org"
-echo " cd ${archivedir}"
-echo ' for i in *.tar.gz; do echo $i; gpg --print-mds $i > $i.mds ; done'
-echo ' for 

hbase git commit: HBASE-19152 Update refguide 'how to build an RC' and the make_rc.sh script

2017-11-03 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master e79a007dd -> 4edfa0656


HBASE-19152 Update refguide 'how to build an RC' and the make_rc.sh script

Removes the src.xml previously used to build the src tgz via hbase-assembly.
Use git archive instead going forward. Updates the developer release candidate
documentation and the make_rc.sh script.

Signed-off-by: Sean Busbey 
Signed-off-by: Peter Somogyi 
Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4edfa065
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4edfa065
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4edfa065

Branch: refs/heads/master
Commit: 4edfa065642835a600f5d5d159e69ef497900455
Parents: e79a007
Author: Michael Stack 
Authored: Wed Nov 1 13:36:19 2017 -0700
Committer: Michael Stack 
Committed: Fri Nov 3 09:52:28 2017 -0700

--
 dev-support/make_rc.sh |  89 ++---
 hbase-assembly/src/main/assembly/src.xml   | 151 ---
 src/main/asciidoc/_chapters/developer.adoc | 234 +++-
 3 files changed, 204 insertions(+), 270 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4edfa065/dev-support/make_rc.sh
--
diff --git a/dev-support/make_rc.sh b/dev-support/make_rc.sh
index b88a984..83b5821 100755
--- a/dev-support/make_rc.sh
+++ b/dev-support/make_rc.sh
@@ -28,8 +28,17 @@
 
 set -e
 
-devsupport=`dirname "$0"`
-devsupport=`cd "$devsupport">/dev/null; pwd`
+# Script checks out a tag, cleans the checkout and then builds src and bin
+# tarballs. It then deploys to the apache maven repository.
+# Presumes run from git dir.
+
+# Need a git tag to build.
+if [ "$1" = "" ]
+then
+  echo -n "Usage: $0 TAG_TO_PACKAGE"
+  exit 1
+fi
+git_tag=$1
 
 # Set mvn and mvnopts
 mvn=mvn
@@ -41,45 +50,65 @@ if [ "$MAVEN_OPTS" != "" ]; then
   mvnopts="${MAVEN_OPTS}"
 fi
 
-# Make a dir to save tgzs in.
+# Ensure we are inside a git repo before making progress
+# The below will fail if outside git.
+git -C . rev-parse
+
+# Checkout git_tag
+git checkout "${git_tag}"
+
+# Get mvn project version
+#shellcheck disable=SC2016
+version=$(${mvn} -q -N -Dexec.executable="echo" -Dexec.args='${project.version}' exec:exec)
+hbase_name="hbase-${version}"
+
+# Make a dir to save tgzs into.
 d=`date -u +"%Y%m%dT%H%M%SZ"`
-archivedir="$(pwd)/../`basename $0`.$d"
-echo "Archive dir ${archivedir}"
-mkdir -p "${archivedir}"
+output_dir="/${TMPDIR}/$hbase_name.$d"
+mkdir -p "${output_dir}"
+
 
-function tgz_mover {
-  mv ./hbase-assembly/target/hbase-*.tar.gz "${archivedir}"
+# Build src tgz.
+function build_src {
+  git archive --format=tar.gz --output="${output_dir}/${hbase_name}-src.tar.gz" --prefix="${hbase_name}/" "${git_tag}"
 }
 
-function deploy {
-  MAVEN_OPTS="${mvnopts}" ${mvn} clean install -DskipTests -Prelease \
--Dmaven.repo.local=${archivedir}/repository
-  MAVEN_OPTS="${mvnopts}" ${mvn} install -DskipTests post-site assembly:single 
-Prelease \
--Dmaven.repo.local=${archivedir}/repository
-  tgz_mover
-  MAVEN_OPTS="${mvnopts}" ${mvn} deploy -DskipTests -Papache-release -Prelease 
\
--Dmaven.repo.local=${archivedir}/repository
+# Build bin tgz
+function build_bin {
+  MAVEN_OPTS="${mvnopts}" ${mvn} clean install -DskipTests -Papache-release 
-Prelease \
+-Dmaven.repo.local=${output_dir}/repository
+  MAVEN_OPTS="${mvnopts}" ${mvn} install -DskipTests site assembly:single 
-Papache-release -Prelease \
+-Dmaven.repo.local=${output_dir}/repository
+  mv ./hbase-assembly/target/hbase-*.tar.gz "${output_dir}"
 }
 
-# Build src tarball
-# run clean separate from assembly:single because it fails to clean shaded 
modules correctly
+# Make sure all clean.
+git clean -f -x -d
 MAVEN_OPTS="${mvnopts}" ${mvn} clean
-MAVEN_OPTS="${mvnopts}" ${mvn} install -DskipTests assembly:single \
-  -Dassembly.file="$(pwd)/hbase-assembly/src/main/assembly/src.xml" \
-  -Prelease -Dmaven.repo.local=${archivedir}/repository
-
-tgz_mover
 
 # Now do the two builds,  one for hadoop1, then hadoop2
-deploy
+# Run a rat check.
+${mvn} apache-rat:check
+
+#Build src.
+build_src
+
+# Build bin product
+build_bin
+
+# Deploy to the Apache mvn repository.
+# Depends on build_bin having populated the local repository.
+# If the upload fails partway, you will probably have to remove the partial
+# upload from repository.apache.org by 'drop'ping it from the staging
+# repository before retrying.
+MAVEN_OPTS="${mvnopts}" ${mvn} deploy -DskipTests -Papache-release -Prelease \
+-Dmaven.repo.local=${output_dir}/repository
 
 echo "DONE"
-echo "Check the content of ${archivedir}.  If good, sign and push to dist.apache.org"
-echo " cd ${archivedir}"
-echo ' for i in *.tar.gz; do echo $i; gpg --print-mds $i > $i.mds ; done'
-echo ' for i in

[25/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html 
b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html
index 9d9e1e5..93762a7 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.ReferenceFileFilter.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class FSUtils.ReferenceFileFilter
+public static class FSUtils.ReferenceFileFilter
 extends AbstractFileStatusFilter
 
 
@@ -220,7 +220,7 @@ extends 
 
 fs
-private final org.apache.hadoop.fs.FileSystem fs
+private final org.apache.hadoop.fs.FileSystem fs
 
 
 
@@ -237,7 +237,7 @@ extends 
 
 ReferenceFileFilter
-public ReferenceFileFilter(org.apache.hadoop.fs.FileSystem fs)
+public ReferenceFileFilter(org.apache.hadoop.fs.FileSystem fs)
 
 
 
@@ -254,7 +254,7 @@ extends 
 
 accept
-protected boolean accept(org.apache.hadoop.fs.Path p,
+protected boolean accept(org.apache.hadoop.fs.Path p,
  @CheckForNull
  http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true";
 title="class or interface in java.lang">Boolean isDir)
 Description copied from 
class: AbstractFileStatusFilter

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html 
b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html
index 788b8f7..ef91a42 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.RegionDirFilter.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class FSUtils.RegionDirFilter
+public static class FSUtils.RegionDirFilter
 extends AbstractFileStatusFilter
 Filter for all dirs that don't start with '.'
 
@@ -225,7 +225,7 @@ extends 
 
 regionDirPattern
-public static final http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern regionDirPattern
+public static final http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html?is-external=true";
 title="class or interface in java.util.regex">Pattern regionDirPattern
 
 
 
@@ -234,7 +234,7 @@ extends 
 
 fs
-final org.apache.hadoop.fs.FileSystem fs
+final org.apache.hadoop.fs.FileSystem fs
 
 
 
@@ -251,7 +251,7 @@ extends 
 
 RegionDirFilter
-public RegionDirFilter(org.apache.hadoop.fs.FileSystem fs)
+public RegionDirFilter(org.apache.hadoop.fs.FileSystem fs)
 
 
 
@@ -268,7 +268,7 @@ extends 
 
 accept
-protected boolean accept(org.apache.hadoop.fs.Path p,
+protected boolean accept(org.apache.hadoop.fs.Path p,
  @CheckForNull
  http://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html?is-external=true";
 title="class or interface in java.lang">Boolean isDir)
 Description copied from 
class: AbstractFileStatusFilter

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html 
b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html
index 6fa4dba..a9eb2bd 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.UserTableDirFilter.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class FSUtils.UserTableDirFilter
+public static class FSUtils.UserTableDirFilter
 extends FSUtils.BlackListDirFilter
 A PathFilter that returns usertable 
directories. To get all directories use the
  FSUtils.BlackListDirFilter with 
a null blacklist
@@ -212,7 +212,7 @@ extends 
 
 UserTableDirFilter
-public UserTableDirFilter(org.apache.hadoop.fs.FileSystem fs)
+public UserTableDirFilter(org.apache.hadoop.fs.FileSystem fs)
 
 
 
@@ -229,7 +229,7 @@ extends 
 
 isValidName
-protected boolean isValidName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
+protected boolean isValidName(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String name)
 
 Overrides:
 isValidName in
 class FSUtils.BlackListDirFilter
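
A quick illustration of how these filters are meant to be used: RegionDirFilter and UserTableDirFilter extend AbstractFileStatusFilter, which the javadoc above describes as a PathFilter, so they can be handed straight to FileSystem#listStatus. A minimal sketch, assuming a default hbase.rootdir layout and a placeholder table name (note these classes are private HBase internals, not public API):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.util.FSUtils;

public class ListRegionDirs {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    // Placeholder path; adjust to the actual hbase.rootdir layout.
    Path tableDir = new Path("/hbase/data/default/usertable");
    // RegionDirFilter admits only children whose names match regionDirPattern,
    // so dot-prefixed entries such as .tmp and .tabledesc are skipped.
    FileStatus[] regionDirs = fs.listStatus(tableDir, new FSUtils.RegionDirFilter(fs));
    for (FileStatus rd : regionDirs) {
      System.out.println(rd.getPath().getName());
    }
  }
}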



[46/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 96098d2..ddde653 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase – Checkstyle Results
 
@@ -286,10 +286,10 @@
  Warnings
  Errors
 
-3509
+3511
 0
 0
-22040
+21935
 
 Files
 
@@ -1977,7 +1977,7 @@
 org/apache/hadoop/hbase/client/RawAsyncTable.java
 0
 0
-2
+1
 
 org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
 0
@@ -2352,7 +2352,7 @@
 org/apache/hadoop/hbase/client/TestClientClusterStatus.java
 0
 0
-2
+1
 
 org/apache/hadoop/hbase/client/TestClientExponentialBackoff.java
 0
@@ -2787,7 +2787,7 @@
 org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.java
 0
 0
-12
+11
 
 org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
 0
@@ -4217,7 +4217,7 @@
 org/apache/hadoop/hbase/http/HttpServer.java
 0
 0
-47
+46
 
 org/apache/hadoop/hbase/http/HttpServerFunctionalTest.java
 0
@@ -6197,7 +6197,7 @@
 org/apache/hadoop/hbase/master/MasterFileSystem.java
 0
 0
-22
+18
 
 org/apache/hadoop/hbase/master/MasterMetaBootstrap.java
 0
@@ -6567,7 +6567,7 @@
 org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
 0
 0
-3
+1
 
 org/apache/hadoop/hbase/master/assignment/MoveRegionProcedure.java
 0
@@ -6879,1217 +6879,1207 @@
 0
 1
 
-org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizer.java
-0
-0
-1
-
 org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/master/procedure/AbstractStateMachineNamespaceProcedure.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/master/procedure/AbstractStateMachineRegionProcedure.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/procedure/AbstractStateMachineTableProcedure.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/procedure/CloneSnapshotProcedure.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/master/procedure/CreateNamespaceProcedure.java
 0
 0
 26
-
+
 org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/procedure/DeleteNamespaceProcedure.java
 0
 0
 29
-
+
 org/apache/hadoop/hbase/master/procedure/DeleteTableProcedure.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/procedure/DisableTableProcedure.java
 0
 0
 38
-
+
 org/apache/hadoop/hbase/master/procedure/EnableTableProcedure.java
 0
 0
 72
-
+
 org/apache/hadoop/hbase/master/procedure/MasterProcedureEnv.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/procedure/MasterProcedureScheduler.java
 0
 0
 25
-
+
 org/apache/hadoop/hbase/master/procedure/MasterProcedureSchedulerPerformanceEvaluation.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/master/procedure/MasterProcedureUtil.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/procedure/ModifyNamespaceProcedure.java
 0
 0
 17
-
+
 org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
 0
 0
 46
-
+
 org/apache/hadoop/hbase/master/procedure/ProcedureDescriber.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/master/procedure/ProcedurePrepareLatch.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/procedure/ProcedureSyncWait.java
 0
 0
 46
-
+
 org/apache/hadoop/hbase/master/procedure/RSProcedureDispatcher.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/procedure/RecoverMetaProcedure.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/procedure/RestoreSnapshotProcedure.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
 0
 0
 58
-
+
 org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedureFromClient.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/procedure/TestMasterFailoverWithProcedures.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/procedure/TestMasterProcedureEvents.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/procedure/TestMasterProcedureScheduler.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/master/procedure/TestMasterProcedureSchedulerConcurrency.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/master/procedure/TestMasterProcedureWalLease.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/master/procedure/TestRestoreSnapshotProcedure.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/procedure/TestSafemodeBringsDownMaster.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/procedure/TestServerCrashProcedure.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/master/procedure/TestTableDDLProcedureBase.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/procedure/TestTableDescriptorModificationFromClient.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/master/procedure/TestWALProcedureStoreOnHDF

[19/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CheckAndMutateBuilderImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CheckAndMutateBuilderImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CheckAndMutateBuilderImpl.html
index 0d33cae..19fa457 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CheckAndMutateBuilderImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CheckAndMutateBuilderImpl.html
@@ -26,564 +26,607 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
java.util.stream.Collectors.toList;
-021import static 
org.apache.hadoop.hbase.HConstants.EMPTY_END_ROW;
-022import static 
org.apache.hadoop.hbase.HConstants.EMPTY_START_ROW;
-023import static 
org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
-024import static 
org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+021import static 
org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
+022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+023
+024import com.google.protobuf.RpcChannel;
 025
-026import com.google.protobuf.RpcChannel;
-027
-028import java.io.IOException;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.List;
-032import java.util.Optional;
-033import 
java.util.concurrent.CompletableFuture;
-034import java.util.concurrent.TimeUnit;
-035import 
java.util.concurrent.atomic.AtomicBoolean;
-036import 
java.util.concurrent.atomic.AtomicInteger;
-037import java.util.function.Function;
-038
-039import 
org.apache.hadoop.conf.Configuration;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.HRegionLocation;
-042import 
org.apache.hadoop.hbase.TableName;
-043import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
-044import 
org.apache.hadoop.hbase.filter.BinaryComparator;
-045import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-046import 
org.apache.hadoop.hbase.util.Bytes;
-047import 
org.apache.hadoop.hbase.util.ReflectionUtils;
-048import 
org.apache.yetus.audience.InterfaceAudience;
-049
-050import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-051import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-052import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-053import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-054import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
-055import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
-056import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest;
-057import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse;
-058import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
-059import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse;
-060import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
-061import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
-062import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
-064
-065/**
-066 * The implementation of RawAsyncTable.
-067 */
-068@InterfaceAudience.Private
-069class RawAsyncTableImpl implements 
RawAsyncTable {
+026import java.io.IOException;
+027import java.util.ArrayList;
+028import java.util.Arrays;
+029import java.util.List;
+030import 
java.util.concurrent.CompletableFuture;
+031import java.util.concurrent.TimeUnit;
+032import 
java.util.concurrent.atomic.AtomicBoolean;
+033import 
java.util.concurrent.atomic.AtomicInteger;
+034import java.util.function.Function;
+035
+036import 
org.apache.hadoop.conf.Configuration;
+037import 
org.apache.hadoop.hbase.CompareOperator;
+038import 
org.apache.hadoop.hbase.HConstants;
+039import 
org.apache.hadoop.hbase.HRegionLocation;
+040import 
org.apache.hadoop.hbase.TableName;
+041import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
+042import 
org.apache.hadoop.hbase.filter.BinaryComparator;
+043import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+044import 
org.apache.hadoop.hbase.util.Bytes;
+045import 
org.apache.hadoop.hbase.util.ReflectionUtils;
+046import 
org.apache.yetus.audience.InterfaceAudience;
+047
+048import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+049import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
+050import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+051import 
org.apache.hadoop.hbase.shaded.protobuf.RequestC
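
The CheckAndMutateBuilderImpl added to this file backs the fluent checkAndMutate call on the async client. A hedged sketch of the call shape as of this 2.0-era snapshot (the connection, table, and values are placeholders; thenPut completes with whether the mutation was applied):

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RawAsyncTable;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndMutateSketch {
  // Completes with true iff row/f:q still held "expected" when the Put was applied.
  static CompletableFuture<Boolean> swapIfExpected(AsyncConnection conn) {
    RawAsyncTable table = conn.getRawTable(TableName.valueOf("t"));
    byte[] row = Bytes.toBytes("row1");
    byte[] f = Bytes.toBytes("f");
    byte[] q = Bytes.toBytes("q");
    // The builder reads fluently: check row/f:q, and only if it equals
    // "expected", apply the Put -- one atomic server-side operation.
    return table.checkAndMutate(row, f)
        .qualifier(q)
        .ifEquals(Bytes.toBytes("expected"))
        .thenPut(new Put(row).addColumn(f, q, Bytes.toBytes("updated")));
  }
}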

[44/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 5552da2..62e9574 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -8572,6 +8572,8 @@
  
 callable
 - Variable in class org.apache.hadoop.hbase.client.ClientScanner
  
+callable
 - Variable in class org.apache.hadoop.hbase.client.RawAsyncTableImpl.CoprocessorServiceBuilderImpl
+ 
 callable
 - Variable in class org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.RetryingRPC
  
 callable
 - Variable in class org.apache.hadoop.hbase.replication.regionserver.RegionReplicaReplicationEndpoint.RetryingRpcCallable
@@ -8582,6 +8584,8 @@
  
 callback
 - Variable in class org.apache.hadoop.hbase.client.AsyncRequestFutureImpl
  
+callback
 - Variable in class org.apache.hadoop.hbase.client.RawAsyncTableImpl.CoprocessorServiceBuilderImpl
+ 
 Callback(Promise,
 long, Collection) - Constructor for class 
org.apache.hadoop.hbase.io.asyncfs.FanOutOneBlockAsyncDFSOutput.Callback
  
 callback - 
Variable in class org.apache.hadoop.hbase.ipc.Call
@@ -10787,6 +10791,10 @@
 
 checkServiceStarted()
 - Method in class org.apache.hadoop.hbase.master.HMaster
  
+checkShortCircuitReadBufferSize(Configuration)
 - Static method in class org.apache.hadoop.hbase.util.CommonFSUtils
+
+Check if short circuit read buffer size is set and if not, 
set it to hbase value.
+
 checkShortCircuitReadBufferSize(Configuration)
 - Static method in class org.apache.hadoop.hbase.util.FSUtils
 
 Check if short circuit read buffer size is set and if not, 
set it to hbase value.
@@ -11236,6 +11244,8 @@
  
 claimQueue(String,
 String) - Method in class org.apache.hadoop.hbase.replication.TableBasedReplicationQueuesImpl
  
+CLASS
 - Static variable in class org.apache.hadoop.hbase.util.CommonFSUtils.StreamCapabilities
+ 
 CLASS_LOADER
 - Static variable in exception org.apache.hadoop.hbase.ipc.RemoteWithExtrasException
  
 CLASS_NAME
 - Static variable in class org.apache.hadoop.hbase.util.UnsafeAvailChecker
@@ -13675,6 +13685,19 @@
 
 Commit all writers.
 
+CommonFSUtils - Class in org.apache.hadoop.hbase.util
+
+Utility methods for interacting with the underlying file 
system.
+
+CommonFSUtils()
 - Constructor for class org.apache.hadoop.hbase.util.CommonFSUtils
+ 
+CommonFSUtils.StreamCapabilities - Class in org.apache.hadoop.hbase.util
+ 
+CommonFSUtils.StreamLacksCapabilityException - 
Exception in org.apache.hadoop.hbase.util
+
+Helper exception for those cases where the place where we 
need to check a stream capability
+ is not where we have the needed context to explain the impact and mitigation 
for a lack.
+
 CommonsCryptoAES - Class in org.apache.hadoop.hbase.io.crypto.aes
  
 CommonsCryptoAES(CipherProvider)
 - Constructor for class org.apache.hadoop.hbase.io.crypto.aes.CommonsCryptoAES
@@ -17408,21 +17431,15 @@
 
 Execute the given coprocessor call on the region which 
contains the given row.
 
-coprocessorService(Function, RawAsyncTable.CoprocessorCallable, byte[], byte[], 
RawAsyncTable.CoprocessorCallback) - Method in interface 
org.apache.hadoop.hbase.client.RawAsyncTable
-
-Execute the given coprocessor call on the regions which are 
covered by the range from
- startKey inclusive and endKey exclusive.
-
-coprocessorService(Function, RawAsyncTable.CoprocessorCallable, byte[], boolean, byte[], 
boolean, RawAsyncTable.CoprocessorCallback) - Method in 
interface org.apache.hadoop.hbase.client.RawAsyncTable
+coprocessorService(Function, RawAsyncTable.CoprocessorCallable, 
RawAsyncTable.CoprocessorCallback) - Method in interface 
org.apache.hadoop.hbase.client.RawAsyncTable
 
-Execute the given coprocessor call on the regions which are 
covered by the range from
- startKey and endKey.
+Execute a coprocessor call on the regions which are covered 
by a range.
 
 coprocessorService(Function, RawAsyncTable.CoprocessorCallable, RegionInfo, 
byte[]) - Method in class org.apache.hadoop.hbase.client.RawAsyncTableImpl
  
 coprocessorService(Function, RawAsyncTable.CoprocessorCallable, byte[]) - 
Method in class org.apache.hadoop.hbase.client.RawAsyncTableImpl
  
-coprocessorService(Function, RawAsyncTable.CoprocessorCallable, byte[], boolean, byte[], 
boolean, RawAsyncTable.CoprocessorCallback) - Method in 
class org.apache.hadoop.hbase.client.RawAsyncTableImpl
+coprocessorService(Function, RawAsyncTable.CoprocessorCallable, 
RawAsyncTable.CoprocessorCallback) - Method in class 
org.apache.hadoop.hbase.client.RawAsyncTableImpl
  
 coprocessorService(byte[])
 - Method in interface org.apache.hadoop.hbase.client.Table
 
@@ -17475,6 +17492,8 @@
 
 Deprecated.
  
+CoprocessorServiceBuilderImpl(Function
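
These index entries record an API reshape: the coprocessorService overloads taking positional (startKey, startKeyInclusive, endKey, endKeyInclusive) arguments are gone, and the single remaining method hands back a CoprocessorServiceBuilder on which the range is configured. A sketch of the new shape, with the stub maker, callable, callback, and keys all left as placeholder parameters (the fromRow/toRow/execute names follow the 2.0-era RawAsyncTable javadoc and should be treated as an assumption here):

import java.util.function.Function;
import com.google.protobuf.RpcChannel;
import org.apache.hadoop.hbase.client.RawAsyncTable;

public class CoprocessorRangeSketch {
  // Generic parameters: S = service stub type, R = per-region response type.
  static <S, R> void callOverRange(RawAsyncTable table,
      Function<RpcChannel, S> stubMaker,
      RawAsyncTable.CoprocessorCallable<S, R> callable,
      RawAsyncTable.CoprocessorCallback<R> callback,
      byte[] startKey, byte[] endKey) {
    table.coprocessorService(stubMaker, callable, callback)
        .fromRow(startKey, true)   // inclusive start, formerly a boolean flag
        .toRow(endKey, false)      // exclusive end, formerly a boolean flag
        .execute();
  }
}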

hbase-site git commit: INFRA-10751 Empty commit

2017-11-03 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site 7d38bdbbf -> 46e1b6b03


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/46e1b6b0
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/46e1b6b0
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/46e1b6b0

Branch: refs/heads/asf-site
Commit: 46e1b6b034d8f655b0258859760f2b85c9e92045
Parents: 7d38bdb
Author: jenkins 
Authored: Fri Nov 3 15:17:21 2017 +
Committer: jenkins 
Committed: Fri Nov 3 15:17:21 2017 +

--

--




[34/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
index a903d1b..e1a397c 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/package-tree.html
@@ -324,11 +324,11 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true";
 title="class or interface in java.lang">Enum (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true";
 title="class or interface in java.lang">Comparable, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true";
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.master.RegionState.State
+org.apache.hadoop.hbase.master.SplitLogManager.ResubmitDirective
 org.apache.hadoop.hbase.master.SplitLogManager.TerminationStatus
-org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode
 org.apache.hadoop.hbase.master.MetricsMasterSourceFactoryImpl.FactoryStorage
-org.apache.hadoop.hbase.master.SplitLogManager.ResubmitDirective
+org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode
+org.apache.hadoop.hbase.master.RegionState.State
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureConstants.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureConstants.html
 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureConstants.html
index d1656a1..462c4a5 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureConstants.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/procedure/MasterProcedureConstants.html
@@ -140,12 +140,6 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-MASTER_PROCEDURE_LOGDIR
-Used to construct the name of the log directory for master 
procedures
-
-
-
-static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 MASTER_PROCEDURE_THREADS
 Number of threads used by the procedure executor
 
@@ -199,27 +193,13 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 Field Detail
-
-
-
-
-
-MASTER_PROCEDURE_LOGDIR
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MASTER_PROCEDURE_LOGDIR
-Used to construct the name of the log directory for master 
procedures
-
-See Also:
-Constant
 Field Values
-
-
-
 
 
 
 
 
 MASTER_PROCEDURE_THREADS
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MASTER_PROCEDURE_THREADS
+public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MASTER_PROCEDURE_THREADS
 Number of threads used by the procedure executor
 
 See Also:
@@ -233,7 +213,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 DEFAULT_MIN_MASTER_PROCEDURE_THREADS
-public static final int DEFAULT_MIN_MASTER_PROCEDURE_THREADS
+public static final int DEFAULT_MIN_MASTER_PROCEDURE_THREADS
 
 See Also:
 Constant
 Field Values
@@ -246,7 +226,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 EXECUTOR_ABORT_ON_CORRUPTION
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String EXECUTOR_ABORT_ON_CORRUPTION
+public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String EXECUTOR_ABORT_ON_CORRUPTION
 Procedure replay sanity check. In case a WAL is missing or 
unreadable we
  may lose information about pending/running procedures.
  Set this to true in case you want the Master failing on load if a corrupted
@@ -265,7 +245,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 DEFAULT_EXECUTOR_ABORT_ON_CORRUPTION
-public static final boolean DEFAULT_EXECUTOR_ABORT_ON_CORRUPTION
+public static final boolean DEFAULT_EXECUTOR_ABORT_ON_CORRUPTION
 
 See Also:
 Constant
 Field Values

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/master/procedure/package-tree.html
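
Operationally, the two surviving constants documented above are ordinary Configuration keys. A minimal sketch of setting them (values are illustrative only, and MasterProcedureConstants is a private class, so deployments would normally set the underlying string keys in hbase-site.xml instead):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants;

public class ProcedureExecutorConfigSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Fail the Master on load if a corrupted procedure WAL is found on
    // replay, rather than continuing with partial procedure state.
    conf.setBoolean(MasterProcedureConstants.EXECUTOR_ABORT_ON_CORRUPTION, true);
    // Size the procedure executor thread pool explicitly (illustrative value).
    conf.setInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS, 16);
  }
}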

[33/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
index 790..82fcfd8 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/WALProcedureStore.html
@@ -120,7 +120,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Private
  @InterfaceStability.Evolving
-public class WALProcedureStore
+public class WALProcedureStore
 extends ProcedureStoreBase
 WAL implementation of the ProcedureStore.
 
@@ -281,149 +281,155 @@ extends 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-MAX_RETRIES_BEFORE_ROLL_CONF_KEY 
+MASTER_PROCEDURE_LOGDIR
+Used to construct the name of the log directory for master 
procedures
+
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
-MAX_SYNC_FAILURE_ROLL_CONF_KEY 
+MAX_RETRIES_BEFORE_ROLL_CONF_KEY 
 
 
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
+MAX_SYNC_FAILURE_ROLL_CONF_KEY 
+
+
 private int
 maxRetriesBeforeRoll 
 
-
+
 private int
 maxSyncFailureRoll 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 PERIODIC_ROLL_CONF_KEY 
 
-
+
 private int
 periodicRollMsec 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 ROLL_RETRIES_CONF_KEY 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 ROLL_THRESHOLD_CONF_KEY 
 
-
+
 private int
 rollRetries 
 
-
+
 private long
 rollThreshold 
 
-
+
 private int
 runningProcCount 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/Condition.html?is-external=true";
 title="class or interface in 
java.util.concurrent.locks">Condition
 slotCond 
 
-
+
 private int
 slotIndex 
 
-
+
 private ByteSlot[]
 slots 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/LinkedTransferQueue.html?is-external=true";
 title="class or interface in 
java.util.concurrent">LinkedTransferQueue
 slotsCache 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 STORE_WAL_SYNC_STATS_COUNT 
 
-
+
 private ProcedureStoreTracker
 storeTracker 
 
-
+
 private 
org.apache.hadoop.fs.FSDataOutputStream
 stream 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 SYNC_WAIT_MSEC_CONF_KEY 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/Condition.html?is-external=true";
 title="class or interface in 
java.util.concurrent.locks">Condition
 syncCond 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicReference.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicReferenceThrowable>
 syncException 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicLong
 syncId 
 
-
+
 private int
 syncMaxSlot 
 
-
+
 private 
org.apache.commons.collections4.queue.CircularFifoQueue
 syncMetricsQueue 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true";
 title="class or interface in java.lang">Thread
 syncThread 
 
-
+
 private int
 syncWaitMsec 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true";
 title="class or interface in 
java.util.concurrent.atomic">AtomicLong
 totalSynced 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 USE_HSYNC_CONF_KEY 
 
-
+
 private boolean
 useHsync 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String
 WAIT_BEFORE_ROLL_CONF_KEY 
 
-
+
 private int
 waitBeforeRoll 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/locks/Condition.html?is-external=tru
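
Most of the field summary above is tuning knobs read from Configuration. A hedged sketch of adjusting roll and sync behaviour through the exposed key constants (values are illustrative, and this assumes the *_CONF_KEY constants are publicly accessible, which the listing does not fully confirm):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.procedure2.store.wal.WALProcedureStore;

public class ProcedureWalTuningSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Roll the procedure WAL at least hourly (illustrative value).
    conf.setInt(WALProcedureStore.PERIODIC_ROLL_CONF_KEY, 60 * 60 * 1000);
    // Prefer hsync over hflush when persisting procedure state.
    conf.setBoolean(WALProcedureStore.USE_HSYNC_CONF_KEY, true);
  }
}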

[45/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index fa5874c..77591bc 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -25,8 +25,8 @@ under the License.
 en-us
 ©2007 - 2017 The Apache Software Foundation
 
-  File: 3509,
- Errors: 22040,
+  File: 3511,
+ Errors: 21935,
  Warnings: 0,
  Infos: 0
   
@@ -2197,7 +2197,7 @@ under the License.
   0
 
 
-  12
+  11
 
   
   
@@ -3261,7 +3261,7 @@ under the License.
   0
 
 
-  2
+  1
 
   
   
@@ -5809,7 +5809,7 @@ under the License.
   0
 
 
-  4
+  3
 
   
   
@@ -8581,7 +8581,7 @@ under the License.
   0
 
 
-  2
+  0
 
   
   
@@ -10961,7 +10961,7 @@ under the License.
   0
 
 
-  10
+  7
 
   
   
@@ -11605,7 +11605,7 @@ under the License.
   0
 
 
-  17
+  11
 
   
   
@@ -12403,7 +12403,7 @@ under the License.
   0
 
 
-  8
+  5
 
   
   
@@ -15446,6 +15446,20 @@ under the License.
   
   
 
+  http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.util.CommonFSUtils.java";>org/apache/hadoop/hbase/util/CommonFSUtils.java
+
+
+  0
+
+
+  0
+
+
+  0
+
+  
+  
+
   http://hbase.apache.org/checkstyle.html#org.apache.hadoop.hbase.mapreduce.TestMapReduceExamples.java";>org/apache/hadoop/hbase/mapreduce/TestMapReduceExamples.java
 
 
@@ -16225,7 +16239,7 @@ under the License.
   0
 
 
-  32
+  31
 
   
   
@@ -16659,7 +16673,7 @@ under the License.
   0
 
 
-  47
+  46
 
   
   
@@ -17933,7 +17947,7 @@ under the License.
   0
 
 
-  11
+  7
 
   
   
@@ -21727,7 +21741,7 @@ under the License.
   0
 
 
-  22
+  18
 
   
   
@@ -24359,7 +24373,7 @@ under the License.
   0
 
 
-  16
+  10
 
   
   
@@ -25997,7 +26011,7 @@ under the License.
   0
 
 
-  8
+  7
 
   
   
@@ -27201,7 +27215,7 @@ under the License.
   0
 
 
-  35
+  6
 
   
   
@@ -27817,7 +27831,7 @@ under the License.
   0
 
 
-  98
+  63
 
   
   
@@ -28167,7 +28181,7 @@ under the License.
   0
 
 
-  1
+  0
 
   
   
@@ -29693,7 +29707,7 @@ under the License.
   0
 
 
-  8
+  7
 
   
   
@@ -31485,7 +31499,7 @@ under the License.
   0
 
 
-  2
+  1
 
   
   
@@ -34075,7 +34089,7 @@ under the License.
  

[09/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.html
index 1553cc6..1daa9e8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.html
@@ -73,434 +73,431 @@
 065public class HFileSystem extends 
FilterFileSystem {
 066  public static final Log LOG = 
LogFactory.getLog(HFileSystem.class);
 067
-068  /** Parameter name for HBase WAL 
directory */
-069  public static final String 
HBASE_WAL_DIR = "hbase.wal.dir";
-070
-071  private final FileSystem noChecksumFs;  
 // read hfile data from storage
-072  private final boolean 
useHBaseChecksum;
-073  private static volatile byte 
unspecifiedStoragePolicyId = Byte.MIN_VALUE;
-074
-075  /**
-076   * Create a FileSystem object for HBase 
regionservers.
-077   * @param conf The configuration to be 
used for the filesystem
-078   * @param useHBaseChecksum if true, 
then use
-079   *checksum verfication in 
hbase, otherwise
-080   *delegate checksum 
verification to the FileSystem.
-081   */
-082  public HFileSystem(Configuration conf, 
boolean useHBaseChecksum)
-083throws IOException {
-084
-085// Create the default filesystem with 
checksum verification switched on.
-086// By default, any operation to this 
FilterFileSystem occurs on
-087// the underlying filesystem that has 
checksums switched on.
-088this.fs = FileSystem.get(conf);
-089this.useHBaseChecksum = 
useHBaseChecksum;
-090
-091fs.initialize(getDefaultUri(conf), 
conf);
-092
-093// disable checksum verification for 
local fileSystem, see HBASE-11218
-094if (fs instanceof LocalFileSystem) 
{
-095  fs.setWriteChecksum(false);
-096  fs.setVerifyChecksum(false);
-097}
-098
-099addLocationsOrderInterceptor(conf);
-100
-101// If hbase checksum verification is 
switched on, then create a new
-102// filesystem object that has cksum 
verification turned off.
-103// We will avoid verifying checksums 
in the fs client, instead do it
-104// inside of hbase.
-105// If this is the local file system 
hadoop has a bug where seeks
-106// do not go to the correct location 
if setVerifyChecksum(false) is called.
-107// This manifests itself in that 
incorrect data is read and HFileBlocks won't be able to read
-108// their header magic numbers. See 
HBASE-5885
-109if (useHBaseChecksum && !(fs 
instanceof LocalFileSystem)) {
-110  conf = new Configuration(conf);
-111  
conf.setBoolean("dfs.client.read.shortcircuit.skip.checksum", true);
-112  this.noChecksumFs = 
maybeWrapFileSystem(newInstanceFileSystem(conf), conf);
-113  
this.noChecksumFs.setVerifyChecksum(false);
-114} else {
-115  this.noChecksumFs = 
maybeWrapFileSystem(fs, conf);
-116}
+068  private final FileSystem noChecksumFs;  
 // read hfile data from storage
+069  private final boolean 
useHBaseChecksum;
+070  private static volatile byte 
unspecifiedStoragePolicyId = Byte.MIN_VALUE;
+071
+072  /**
+073   * Create a FileSystem object for HBase 
regionservers.
+074   * @param conf The configuration to be 
used for the filesystem
+075   * @param useHBaseChecksum if true, 
then use
+076   *checksum verfication in 
hbase, otherwise
+077   *delegate checksum 
verification to the FileSystem.
+078   */
+079  public HFileSystem(Configuration conf, 
boolean useHBaseChecksum)
+080throws IOException {
+081
+082// Create the default filesystem with 
checksum verification switched on.
+083// By default, any operation to this 
FilterFileSystem occurs on
+084// the underlying filesystem that has 
checksums switched on.
+085this.fs = FileSystem.get(conf);
+086this.useHBaseChecksum = 
useHBaseChecksum;
+087
+088fs.initialize(getDefaultUri(conf), 
conf);
+089
+090// disable checksum verification for 
local fileSystem, see HBASE-11218
+091if (fs instanceof LocalFileSystem) 
{
+092  fs.setWriteChecksum(false);
+093  fs.setVerifyChecksum(false);
+094}
+095
+096addLocationsOrderInterceptor(conf);
+097
+098// If hbase checksum verification is 
switched on, then create a new
+099// filesystem object that has cksum 
verification turned off.
+100// We will avoid verifying checksums 
in the fs client, instead do it
+101// inside of hbase.
+102// If this is the local file system 
hadoop has a bug where seeks
+103// do not go to the correct location 
if setVerifyChecksum(false) is called.
+104// This manifests itself in that 
incorrect data is read and HFileBlocks won't be able to read
+105// their header magic numbers. See 
HBASE-5885
+106if (useHBaseChecksum && !(fs 
instanceof
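
The constructor logic above is the core of the HBase-checksum feature: one FileSystem keeps HDFS checksum verification on, while a second instance (noChecksumFs) turns it off so hfile data can be verified exactly once, inside HBase. A minimal sketch of consuming the pair, using only the constructor and accessor shown in this listing:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.fs.HFileSystem;

public class HFileSystemSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // true => verify checksums inside HBase; hfile payload reads then go
    // through a FileSystem with DFS-level verification switched off.
    HFileSystem hfs = new HFileSystem(conf, true);
    FileSystem dataFs = hfs.getNoChecksumFs(); // for hfile data blocks
    FileSystem metaFs = hfs;                   // checksums stay on here
    System.out.println(dataFs.getUri() + " vs " + metaFs.getUri());
  }
}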

[12/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
index cece735..cb909d9 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
@@ -28,446 +28,458 @@
 020import static 
org.apache.hadoop.hbase.client.coprocessor.AggregationHelper.getParsedGenericInstance;
 021import static 
org.apache.hadoop.hbase.client.coprocessor.AggregationHelper.validateArgAndGetPB;
 022
-023import java.io.IOException;
-024import java.util.Map;
-025import java.util.NavigableMap;
-026import java.util.NavigableSet;
-027import 
java.util.NoSuchElementException;
-028import java.util.TreeMap;
-029import 
java.util.concurrent.CompletableFuture;
-030
-031import org.apache.hadoop.hbase.Cell;
-032import 
org.apache.hadoop.hbase.client.RawAsyncTable;
-033import 
org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorCallback;
-034import 
org.apache.hadoop.hbase.client.RawScanResultConsumer;
-035import 
org.apache.hadoop.hbase.client.RegionInfo;
-036import 
org.apache.hadoop.hbase.client.Result;
-037import 
org.apache.hadoop.hbase.client.Scan;
-038import 
org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
-039import 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest;
-040import 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse;
-041import 
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService;
-042import 
org.apache.hadoop.hbase.util.Bytes;
-043import 
org.apache.hadoop.hbase.util.ReflectionUtils;
-044import 
org.apache.yetus.audience.InterfaceAudience;
-045
-046import com.google.protobuf.Message;
-047
-048/**
-049 * This client class is for invoking the 
aggregate functions deployed on the Region Server side via
-050 * the AggregateService. This class will 
implement the supporting functionality for
-051 * summing/processing the individual 
results obtained from the AggregateService for each region.
-052 */
-053@InterfaceAudience.Public
-054public class AsyncAggregationClient {
-055
-056  private static abstract class 
AbstractAggregationCallback
-057  implements 
CoprocessorCallback {
-058
-059private final 
CompletableFuture future;
-060
-061protected boolean finished = false;
-062
-063private void 
completeExceptionally(Throwable error) {
-064  if (finished) {
-065return;
-066  }
-067  finished = true;
-068  
future.completeExceptionally(error);
-069}
-070
-071protected 
AbstractAggregationCallback(CompletableFuture future) {
-072  this.future = future;
-073}
-074
-075@Override
-076public synchronized void 
onRegionError(RegionInfo region, Throwable error) {
-077  completeExceptionally(error);
-078}
-079
-080@Override
-081public synchronized void 
onError(Throwable error) {
-082  completeExceptionally(error);
-083}
-084
-085protected abstract void 
aggregate(RegionInfo region, AggregateResponse resp)
-086throws IOException;
-087
-088@Override
-089public synchronized void 
onRegionComplete(RegionInfo region, AggregateResponse resp) {
-090  try {
-091aggregate(region, resp);
-092  } catch (IOException e) {
-093completeExceptionally(e);
-094  }
-095}
-096
-097protected abstract T 
getFinalResult();
-098
-099@Override
-100public synchronized void onComplete() 
{
-101  if (finished) {
-102return;
-103  }
-104  finished = true;
-105  
future.complete(getFinalResult());
-106}
-107  }
-108
-109  private static  R
-110  
getCellValueFromProto(ColumnInterpreter ci, 
AggregateResponse resp,
-111  int firstPartIndex) throws 
IOException {
-112Q q = 
getParsedGenericInstance(ci.getClass(), 3, 
resp.getFirstPart(firstPartIndex));
-113return ci.getCellValueFromProto(q);
-114  }
-115
-116  private static  S
-117  
getPromotedValueFromProto(ColumnInterpreter ci, 
AggregateResponse resp,
-118  int firstPartIndex) throws 
IOException {
-119T t = 
getParsedGenericInstance(ci.getClass(), 4, 
resp.getFirstPart(firstPartIndex));
-120return 
ci.getPromotedValueFromProto(t);
-121  }
-122
-123  public static  CompletableFuture
-124  max(RawAsyncTable table, 
ColumnInterpreter ci, Scan scan) 
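
The class comment above describes the division of work: each region answers an AggregateService call, and this client folds the partial results. A hedged sketch of the max() entry point visible here (assumes the AggregateImplementation coprocessor is loaded on the table; the connection and table/column names are placeholders):

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.AsyncAggregationClient;
import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
import org.apache.hadoop.hbase.util.Bytes;

public class AsyncMaxSketch {
  static void printMax(AsyncConnection conn) {
    Scan scan = new Scan().addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"));
    // LongColumnInterpreter reads each cell value as a long; max() merges
    // the per-region maxima into a single CompletableFuture result.
    CompletableFuture<Long> max = AsyncAggregationClient.max(
        conn.getRawTable(TableName.valueOf("t")), new LongColumnInterpreter(), scan);
    max.thenAccept(v -> System.out.println("max = " + v));
  }
}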

[30/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
 
b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
index ac80981..54a1767 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.HBaseHandler.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class ThriftServerRunner.HBaseHandler
+public static class ThriftServerRunner.HBaseHandler
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 implements org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 The HBaseHandler is a glue object that connects Thrift RPC 
calls to the
@@ -721,7 +721,7 @@ implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 conf
-protected org.apache.hadoop.conf.Configuration conf
+protected org.apache.hadoop.conf.Configuration conf
 
 
 
@@ -730,7 +730,7 @@ implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 LOG
-protected static final org.apache.commons.logging.Log LOG
+protected static final org.apache.commons.logging.Log LOG
 
 
 
@@ -739,7 +739,7 @@ implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 nextScannerId
-protected int nextScannerId
+protected int nextScannerId
 
 
 
@@ -748,7 +748,7 @@ implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 scannerMap
-protected http://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true";
 title="class or interface in java.util">HashMapInteger,ThriftServerRunner.ResultScannerWrapper> 
scannerMap
+protected http://docs.oracle.com/javase/8/docs/api/java/util/HashMap.html?is-external=true";
 title="class or interface in java.util">HashMapInteger,ThriftServerRunner.ResultScannerWrapper> 
scannerMap
 
 
 
@@ -757,7 +757,7 @@ implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 metrics
-private ThriftMetrics metrics
+private ThriftMetrics metrics
 
 
 
@@ -766,7 +766,7 @@ implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 connectionCache
-private final ConnectionCache connectionCache
+private final ConnectionCache connectionCache
 
 
 
@@ -775,7 +775,7 @@ implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 coalescer
-IncrementCoalescer coalescer
+IncrementCoalescer coalescer
 
 
 
@@ -784,7 +784,7 @@ implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 CLEANUP_INTERVAL
-static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CLEANUP_INTERVAL
+static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String CLEANUP_INTERVAL
 
 See Also:
 Constant
 Field Values
@@ -797,7 +797,7 @@ implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 MAX_IDLETIME
-static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MAX_IDLETIME
+static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String MAX_IDLETIME
 
 See Also:
 Constant
 Field Values
@@ -818,7 +818,7 @@ implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 HBaseHandler
-protected HBaseHandler(org.apache.hadoop.conf.Configuration c,
+protected HBaseHandler(org.apache.hadoop.conf.Configuration c,
UserProvider userProvider)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 
@@ -841,7 +841,7 @@ implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 getAllColumns
-byte[][] getAllColumns(Table table)
+byte[][] getAllColumns(Table table)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="class or interface in java.io">IOException
 Returns a list of all the column families for a given 
Table.
 
@@ -858,7 +858,7 @@ implements 
org.apache.hadoop.hbase.thrift.generated.Hbase.Iface
 
 
 getTable
-public Table getTable(byte[] tableName)
+public Table getTable(byte[] tableName)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true";
 title="

[40/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.CoprocessorCallable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.CoprocessorCallable.html
 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.CoprocessorCallable.html
index bb11a14..65276e4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.CoprocessorCallable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.CoprocessorCallable.html
@@ -97,6 +97,19 @@
 
 
 Uses of RawAsyncTable.CoprocessorCallable in org.apache.hadoop.hbase.client
+
+Fields in org.apache.hadoop.hbase.client
 declared as RawAsyncTable.CoprocessorCallable 
+
+Modifier and Type
+Field and Description
+
+
+
+private RawAsyncTable.CoprocessorCallable
+RawAsyncTableImpl.CoprocessorServiceBuilderImpl.callable 
+
+
+
 
 Methods in org.apache.hadoop.hbase.client
 with parameters of type RawAsyncTable.CoprocessorCallable 
 
@@ -136,53 +149,33 @@
 
 
 
- void
-RawAsyncTableImpl.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true";
 title="class or interface in 
java.util.function">Function stubMaker,
+ RawAsyncTable.CoprocessorServiceBuilder
+RawAsyncTableImpl.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true";
 title="class or interface in 
java.util.function">Function stubMaker,
   RawAsyncTable.CoprocessorCallable callable,
-  byte[] startKey,
-  boolean startKeyInclusive,
-  byte[] endKey,
-  boolean endKeyInclusive,
   RawAsyncTable.CoprocessorCallback callback) 
 
 
- void
-RawAsyncTable.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true";
 title="class or interface in 
java.util.function">Function stubMaker,
+ RawAsyncTable.CoprocessorServiceBuilder
+RawAsyncTable.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true";
 title="class or interface in 
java.util.function">Function stubMaker,
   RawAsyncTable.CoprocessorCallable callable,
-  byte[] startKey,
-  boolean startKeyInclusive,
-  byte[] endKey,
-  boolean endKeyInclusive,
   RawAsyncTable.CoprocessorCallback callback)
-Execute the given coprocessor call on the regions which are 
covered by the range from
- startKey and endKey.
+Execute a coprocessor call on the regions which are covered 
by a range.
 
 
 
-default  void
-RawAsyncTable.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true";
 title="class or interface in 
java.util.function">Function stubMaker,
-  RawAsyncTable.CoprocessorCallable callable,
-  byte[] startKey,
-  byte[] endKey,
-  RawAsyncTable.CoprocessorCallback callback)
-Execute the given coprocessor call on the regions which are 
covered by the range from
- startKey inclusive and endKey exclusive.
-
-
-
 private  http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture
 RawAsyncTableImpl.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true";
 title="class or interface in 
java.util.function">Function stubMaker,
   RawAsyncTable.CoprocessorCallable callable,
   RegionInfo region,
   byte[] row) 
 
-
+
  http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture
 AsyncHBaseAdmin.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true";
 title="class or interface in 
java.util.function">Function stubMaker,
   RawAsyncTable.CoprocessorCallable callable,
   ServerName serverName) 
 
-
+
  http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture
 AsyncAdmin.coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true"
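
For orientation, the CoprocessorCallable referenced throughout this page is a small functional interface that adapts one protobuf stub call into the async framework's callback style. Written as a lambda it looks roughly like the following; the aggregation stub and request are placeholders, and the call(stub, controller, rpcCallback) parameter order is an assumption drawn from the 2.0-era sources:

import org.apache.hadoop.hbase.client.RawAsyncTable;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse;
import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService;

public class CallableSketch {
  // Bridges AggregateService.getRowNum into the shape the framework expects;
  // `req` would be a prebuilt AggregateRequest for the target column.
  static RawAsyncTable.CoprocessorCallable<AggregateService.Stub, AggregateResponse>
      rowNum(AggregateRequest req) {
    return (stub, controller, rpcCallback) -> stub.getRowNum(controller, req, rpcCallback);
  }
}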

[07/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.ListenerInfo.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.ListenerInfo.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.ListenerInfo.html
index 9b5e564..5f34a86 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.ListenerInfo.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.ListenerInfo.html
@@ -25,1367 +25,1374 @@
 017 */
 018package org.apache.hadoop.hbase.http;
 019
-020import java.io.FileNotFoundException;
-021import java.io.IOException;
-022import java.io.InterruptedIOException;
-023import java.io.PrintStream;
-024import java.net.BindException;
-025import java.net.InetSocketAddress;
-026import java.net.URI;
-027import java.net.URISyntaxException;
-028import java.net.URL;
-029import java.util.ArrayList;
-030import java.util.Collections;
-031import java.util.Enumeration;
-032import java.util.HashMap;
-033import java.util.List;
-034import java.util.Map;
-035
-036import javax.servlet.Filter;
-037import javax.servlet.FilterChain;
-038import javax.servlet.FilterConfig;
-039import javax.servlet.ServletContext;
-040import javax.servlet.ServletException;
-041import javax.servlet.ServletRequest;
-042import javax.servlet.ServletResponse;
-043import javax.servlet.http.HttpServlet;
-044import 
javax.servlet.http.HttpServletRequest;
-045import 
javax.servlet.http.HttpServletRequestWrapper;
-046import 
javax.servlet.http.HttpServletResponse;
-047
-048import org.apache.commons.logging.Log;
-049import 
org.apache.commons.logging.LogFactory;
-050import 
org.apache.hadoop.HadoopIllegalArgumentException;
-051import 
org.apache.yetus.audience.InterfaceAudience;
-052import 
org.apache.yetus.audience.InterfaceStability;
-053import 
org.apache.hadoop.conf.Configuration;
-054import 
org.apache.hadoop.fs.CommonConfigurationKeys;
-055import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-056import 
org.apache.hadoop.hbase.http.conf.ConfServlet;
-057import 
org.apache.hadoop.hbase.http.jmx.JMXJsonServlet;
-058import 
org.apache.hadoop.hbase.http.log.LogLevel;
-059import 
org.apache.hadoop.hbase.util.Threads;
-060import 
org.apache.hadoop.hbase.util.ReflectionUtils;
-061import 
org.apache.hadoop.security.SecurityUtil;
-062import 
org.apache.hadoop.security.UserGroupInformation;
-063import 
org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-064import 
org.apache.hadoop.security.authorize.AccessControlList;
-065import org.apache.hadoop.util.Shell;
-066
-067import 
org.eclipse.jetty.http.HttpVersion;
-068import org.eclipse.jetty.server.Server;
-069import 
org.eclipse.jetty.server.Handler;
-070import 
org.eclipse.jetty.server.HttpConfiguration;
-071import 
org.eclipse.jetty.server.HttpConnectionFactory;
-072import 
org.eclipse.jetty.server.ServerConnector;
-073import 
org.eclipse.jetty.server.SecureRequestCustomizer;
-074import 
org.eclipse.jetty.server.SslConnectionFactory;
-075import 
org.eclipse.jetty.server.handler.ContextHandlerCollection;
-076import 
org.eclipse.jetty.server.handler.HandlerCollection;
-077import 
org.eclipse.jetty.server.RequestLog;
-078import 
org.eclipse.jetty.server.handler.RequestLogHandler;
-079import 
org.eclipse.jetty.servlet.FilterMapping;
-080import 
org.eclipse.jetty.servlet.ServletHandler;
-081import 
org.eclipse.jetty.servlet.FilterHolder;
-082import 
org.eclipse.jetty.servlet.ServletContextHandler;
-083import 
org.eclipse.jetty.servlet.DefaultServlet;
-084import 
org.eclipse.jetty.servlet.ServletHolder;
-085import 
org.eclipse.jetty.util.MultiException;
-086import 
org.eclipse.jetty.util.ssl.SslContextFactory;
-087import 
org.eclipse.jetty.util.thread.QueuedThreadPool;
-088import 
org.eclipse.jetty.webapp.WebAppContext;
-089
-090import 
org.glassfish.jersey.server.ResourceConfig;
-091import 
org.glassfish.jersey.servlet.ServletContainer;
-092
-093import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-094import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-095
-096/**
-097 * Create a Jetty embedded server to 
answer http requests. The primary goal
-098 * is to serve up status information for 
the server.
-099 * There are three contexts:
-100 *   "/logs/" -> points to the log 
directory
-101 *   "/static/" -> points to 
common static files (src/webapps/static)
-102 *   "/" -> the jsp server code 
from (src/webapps/<name>)
-103 */
-104@InterfaceAudience.Private
-105@InterfaceStability.Evolving
-106public class HttpServer implements 
FilterContainer {
-107  private static final Log LOG = 
LogFactory.getLog(HttpServer.class);
-108  private static final String 
EMPTY_STRING = "";
-109
-110  private static final int 
DEFAULT_MAX_HEADER_SIZE = 64 * 1024; // 64K
-111
-112  static final String 
FILTER_INITIALIZERS_PROPERTY
-113  = 
"hbas

[38/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html 
b/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
index fddb965..f199756 100644
--- a/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
+++ b/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static class HFileSystem.ReorderWALBlocks
+static class HFileSystem.ReorderWALBlocks
 extends Object
 implements HFileSystem.ReorderBlocks
 We're putting at lowest priority the wal files blocks that are on the same datanode
@@ -192,7 +192,7 @@ implements 
 ReorderWALBlocks
-ReorderWALBlocks()
+ReorderWALBlocks()
@@ -209,7 +209,7 @@ implements 
 reorderBlocks
-public void reorderBlocks(org.apache.hadoop.conf.Configuration conf,
+public void reorderBlocks(org.apache.hadoop.conf.Configuration conf,
                           org.apache.hadoop.hdfs.protocol.LocatedBlocks lbs,
                           String src)
                    throws IOException
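For context on the reorderBlocks hook above, here is a minimal, hedged sketch of a custom block reorderer against that interface. The deprioritization logic and the configuration key are illustrative assumptions, not HBase's shipped implementation (which deprioritizes the WAL writer's local datanode):

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;

// Illustrative reorderer: for WAL files, push replicas hosted on one
// datanode to the end of each block's location list so clients try
// the other replicas first. Would plug in as an HFileSystem.ReorderBlocks.
public class DeprioritizeHostReorderer {
  public void reorderBlocks(Configuration conf, LocatedBlocks lbs, String src)
      throws IOException {
    String hostToAvoid = conf.get("example.datanode.to.deprioritize"); // hypothetical key
    if (hostToAvoid == null || !src.contains("/WALs/")) {
      return; // only WAL reads benefit from this reordering
    }
    for (LocatedBlock block : lbs.getLocatedBlocks()) {
      DatanodeInfo[] locs = block.getLocations();
      // Stable partition: replicas on other hosts first, matching host last.
      Arrays.sort(locs, (a, b) ->
          Boolean.compare(a.getHostName().equals(hostToAvoid),
                          b.getHostName().equals(hostToAvoid)));
    }
  }
}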

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html b/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html
index bd84c51..9f947d2 100644
--- a/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html
+++ b/devapidocs/org/apache/hadoop/hbase/fs/HFileSystem.html
@@ -187,24 +187,18 @@ extends org.apache.hadoop.fs.FilterFileSystem
 Field and Description
 
 
-static String
-HBASE_WAL_DIR
-Parameter name for HBase WAL directory
 static org.apache.commons.logging.Log
 LOG 
 
-
+
 private org.apache.hadoop.fs.FileSystem
 noChecksumFs 
 
-
+
 private static byte
 unspecifiedStoragePolicyId 
 
-
+
 private boolean
 useHBaseChecksum 
 
@@ -416,27 +410,13 @@ extends org.apache.hadoop.fs.FilterFileSystem
 public static final org.apache.commons.logging.Log LOG
 
 
-HBASE_WAL_DIR
-public static final String HBASE_WAL_DIR
-Parameter name for HBase WAL directory
-See Also: Constant Field Values
 
 
 
 
 
 noChecksumFs
-private final org.apache.hadoop.fs.FileSystem noChecksumFs
+private final org.apache.hadoop.fs.FileSystem noChecksumFs
 
 
 
@@ -445,7 +425,7 @@ extends org.apache.hadoop.fs.FilterFileSystem
 
 
 useHBaseChecksum
-private final boolean useHBaseChecksum
+private final boolean useHBaseChecksum
 
 
 
@@ -454,7 +434,7 @@ extends org.apache.hadoop.fs.FilterFileSystem
 
 
 unspecifiedStoragePolicyId
-private static volatile byte unspecifiedStoragePolicyId
+private static volatile byte unspecifiedStoragePolicyId
 
 
 
@@ -471,7 +451,7 @@ extends org.apache.hadoop.fs.FilterFileSystem
 
 
 HFileSystem
-public HFileSystem(org.apache.hadoop.conf.Configuration conf,
+public HFileSystem(org.apache.hadoop.conf.Configuration conf,
                    boolean useHBaseChecksum)
             throws IOException
 Create a FileSystem object for HBase regionservers.
@@ -492,7 +472,7 @@ extends org.apache.hadoop.fs.FilterFileSystem
 
 
 HFileSystem
-public HFileSystem(org.apache.hadoop.fs.FileSystem fs)
+public HFileSystem(org.apache.hadoop.fs.FileSystem fs)
 Wrap a FileSystem object within a HFileSystem. The noChecksumFs and
 writefs are both set to be the same specified fs.
 Do not verify hbase-checksums while reading data from filesystem.
@@ -516,7 +496,7 @@ extends org.apache.hadoop.fs.FilterFileSystem
 
 
 getNoChecksumFs
-public org.apache.hadoop.fs.FileSystem getNoChecksumFs()
+public org.apache.hadoop.fs.FileSystem getNoChecksumFs()
 Returns the filesystem that is specially setup for doing reads from storage.
 This object avoids doing checksum verifications for reads.
@@ -533,7 +513,7 @@ extends org.apache.hadoop.fs.FilterFileSystem
 
 
 getBackingFs
-public org.apache.hadoop.fs.FileSystem getBackingFs()
+public org.apache.hadoop.fs.FileSystem getBackingFs()
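Taken together, the constructor and accessor signatures above suggest the following usage pattern; a minimal sketch (the checksum flag and printed URIs are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.fs.HFileSystem;

public class HFileSystemExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Ask HBase to verify its own checksums instead of relying on HDFS's.
    HFileSystem hfs = new HFileSystem(conf, true);
    // Store-file reads go through the no-checksum view ...
    FileSystem readFs = hfs.getNoChecksumFs();
    // ... while everything else uses the backing filesystem.
    FileSystem writeFs = hfs.getBackingFs();
    System.out.println(readFs.getUri() + " / " + writeFs.getUri());
  }
}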
 

[26/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/util/CompressionTest.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/CompressionTest.html b/devapidocs/org/apache/hadoop/hbase/util/CompressionTest.html
index 1adc5af..2234325 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/CompressionTest.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/CompressionTest.html

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/util/FSHDFSUtils.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSHDFSUtils.html b/devapidocs/org/apache/hadoop/hbase/util/FSHDFSUtils.html
index ad49a74..f40f358 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSHDFSUtils.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSHDFSUtils.html
@@ -100,6 +100,9 @@ var activeTableTab = "activeTableTab";
 java.lang.Object
+org.apache.hadoop.hbase.util.CommonFSUtils
 org.apache.hadoop.hbase.util.FSUtils
@@ -109,6 +112,8 @@ var activeTableTab = "activeTableTab";
@@ -138,6 +143,13 @@
 Nested classes/interfaces inherited from class org.apache.hadoop.hbase.util.FSUtils
 FSUtils.BlackListDirFilter, FSUtils.DirFilter, FSUtils.FamilyDirFilter, FSUtils.FileFilter, FSUtils.HFileFilter, FSUtils.HFileLinkFilter, FSUtils.ReferenceFileFilter, FSUtils.RegionDirFilter, FSUtils.UserTableDirFilter
+Nested classes/interfaces inherited from class org.apache.hadoop.hbase.util.CommonFSUtils
+CommonFSUtils.StreamLacksCapabilityException
 
 
 
@@ -170,7 +182,14 @@
 Fields inherited from class org.apache.hadoop.hbase.util.FSUtils
-FULL_RWX_PERMISSIONS, WINDOWS
+WINDOWS
+Fields inherited from class org.apache.hadoop.hbase.util.CommonFSUtils
+FULL_RWX_PERMISSIONS, HBASE_WAL_DIR
 
 
 
@@ -271,7 +290,14 @@
 Methods inherited from class org.apache.hadoop.hbase.util.FSUtils
-addToHDFSBlocksDistribution, checkAccess, checkClusterIdExists, checkDfsSafeMode, checkFileSystemAvailable, checkShortCircuitReadBufferSize, checkVersion, checkVersion, computeHDFSBlocksDistribution, create, create, delete, deleteDirectory, deleteRegionDir, filterFileStatuses, filterFileStatuses, getClusterId, getCurrentFileSystem, getDefaultBlockSize, getDefaultBufferSize, getDefaultReplication, getDFSHedgedReadMetrics, getFamilyDirs, getFilePermissions, getInstance, getLocalTableDirs, getNamespaceDir, getPath, getReferenceFilePaths, getRegionDegreeLocalityMappingFromFS, getRegionDegreeLocalityMappingFromFS, getRegionDirs, getRegionReferenceFileCount, getRootDir, getRootDirFileSystem, getTableDir, getTableDirs, getTableFragmentation, getTableFragmentation, getTableName, getTableStoreFilePathMap, getTableStoreFilePathMap, getTableStoreFilePathMap, getTableStoreFilePathMap, getTotalTableFragmentation, getVersion, getWALFileSystem, getWALRootDir, isDistributedFileSystem, isExists, isHDFS, isMatchingTail, isMatchingTail, isRecoveredEdits, isStartingWithPath, listLocatedStatus, listStatus, listStatus, listStatusWithStatusFilter, logFileSystemState, metaRegionExists, parseVersionFrom, removeWALRootPath, renameAndSetModifyTime, setClusterId, setFsDefault, setRootDir, setStoragePolicy, setStoragePolicy, setupShortCircuitRead, setVersion, setVersion, setVersion, setWALRootDir, toVersionByteArray, validateRootPath, waitOnSafeMode
+addToHDFSBlocksDistribution, checkAccess, checkClusterIdExists, checkDfsSafeMode, checkFileSystemAvailable, checkShortCircuitReadBufferSize, checkVersion, checkVersion, computeHDFSBlocksDistribution, create, deleteRegionDir, filterFileStatuses, filterFileStatuses, getClusterId, getDFSHedgedReadMetrics, getFamilyDirs, getInstance, getLocalTableDirs, getReferenceFilePaths, getRegionDegreeLocalityMappingFromFS, getRegionDegreeLocalityMappingFromFS, getRegionDirs, getRegionReferenceFileCount, getTableDirs, getTableFragmentation, getTableFragmentation, getTableStoreFilePathMap, getTableStoreFilePathMap, getTableStoreFilePathMap, getTableStoreFilePathMap, getTotalTableFragmentation, getVersion, isDistributedFileSystem, isMatchingTail, listStatusWithStatusFilter, metaRegionExists, parseVersionFrom, setClusterId, setupShortCircuitRead, setVersion, setVersion, setVersion, toVersionByteArray, waitOnSafeMode
+Methods inherited from class org.apache.hadoop.hbase.util.CommonFSUtils
+c
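The FSUtils-to-CommonFSUtils split above moves the root-dir, WAL-root and permission helpers into the new common base class. A minimal sketch of resolving both roots through these helpers, assuming the documented hbase.wal.dir key behind HBASE_WAL_DIR (the URIs are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.CommonFSUtils;

public class RootDirExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.rootdir", "hdfs://namenode:8020/hbase");      // illustrative data root
    conf.set("hbase.wal.dir", "hdfs://namenode:8020/hbase-wal");  // optional separate WAL root
    Path rootDir = CommonFSUtils.getRootDir(conf);
    // Falls back to hbase.rootdir when hbase.wal.dir is unset.
    Path walRootDir = CommonFSUtils.getWALRootDir(conf);
    System.out.println("root=" + rootDir + " wal=" + walRootDir);
  }
}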

[36/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
index 74fdee7..989cd43 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public final class AsyncFSOutputHelper
+public final class AsyncFSOutputHelper
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true";
 title="class or interface in java.lang">Object
 Helper class for creating AsyncFSOutput.
 
@@ -192,7 +192,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 AsyncFSOutputHelper
-private AsyncFSOutputHelper()
+private AsyncFSOutputHelper()
 
 
 
@@ -209,7 +209,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 createOutput
-public static AsyncFSOutput createOutput(org.apache.hadoop.fs.FileSystem fs,
+public static AsyncFSOutput createOutput(org.apache.hadoop.fs.FileSystem fs,
                                          org.apache.hadoop.fs.Path f,
                                          boolean overwrite,
                                          boolean createParent,
@@ -217,12 +217,14 @@
                                          long blockSize,
                                          org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop eventLoop,
                                          Class channelClass)
-                                  throws IOException
+                                  throws IOException,
+                                         CommonFSUtils.StreamLacksCapabilityException
 Create FanOutOneBlockAsyncDFSOutput for DistributedFileSystem, and a simple
 implementation for other FileSystem which wraps around a FSDataOutputStream.

 Throws:
 IOException
+CommonFSUtils.StreamLacksCapabilityException
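The new checked exception surfaces when the target filesystem cannot provide the stream capabilities (hflush/hsync) a WAL needs. A hedged sketch of a caller handling it; the wrapper class, flag values and error message are illustrative, only the createOutput signature comes from the diff above:

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.asyncfs.AsyncFSOutput;
import org.apache.hadoop.hbase.io.asyncfs.AsyncFSOutputHelper;
import org.apache.hadoop.hbase.util.CommonFSUtils;

// Hypothetical helper: tries the async fan-out output and reports
// filesystems that cannot guarantee durable WAL syncs.
public final class WalOutputFactory {
  public static AsyncFSOutput open(FileSystem fs, Path walPath,
      org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop loop,
      Class<? extends org.apache.hadoop.hbase.shaded.io.netty.channel.Channel> channelClass)
      throws IOException {
    try {
      return AsyncFSOutputHelper.createOutput(fs, walPath,
          true /* overwrite */, false /* createParent */,
          fs.getDefaultReplication(walPath), fs.getDefaultBlockSize(walPath),
          loop, channelClass);
    } catch (CommonFSUtils.StreamLacksCapabilityException e) {
      throw new IOException("Filesystem for " + walPath
          + " cannot provide the stream capabilities needed for a WAL", e);
    }
  }
}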
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index aaf33df..6793d5b 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -273,12 +273,12 @@
 
 java.lang.Enum (implements java.lang.Comparable, java.io.Serializable)
 
-org.apache.hadoop.hbase.io.hfile.CacheConfig.ExternalBlockCaches
-org.apache.hadoop.hbase.io.hfile.BlockPriority
-org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory
-org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType
 org.apache.hadoop.hbase.io.hfile.HFileBlock.Writer.State
+org.apache.hadoop.hbase.io.hfile.CacheConfig.ExternalBlockCaches
 org.apache.hadoop.hbase.io.hfile.BlockType
+org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType
+org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory
+org.apache.hadoop.hbase.io.hfile.BlockPriority
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
index 9f3fe52..d57391b 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/package-tree.html
@@ -343,9 +343,9 @@
 
 java.lang.Enum

[01/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site d4303803b -> 7d38bdbbf


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
index cb9f9a8..a2a8d9c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/HMaster.InitializationMonitor.html
@@ -61,3475 +61,3465 @@
 053import org.apache.commons.logging.Log;
 054import org.apache.commons.logging.LogFactory;
 055import org.apache.hadoop.conf.Configuration;
-056import org.apache.hadoop.fs.FileSystem;
-057import org.apache.hadoop.fs.Path;
-058import org.apache.hadoop.hbase.ClusterStatus;
-059import org.apache.hadoop.hbase.ClusterStatus.Option;
-060import org.apache.hadoop.hbase.CoordinatedStateException;
-061import org.apache.hadoop.hbase.DoNotRetryIOException;
-062import org.apache.hadoop.hbase.HBaseIOException;
-063import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-064import org.apache.hadoop.hbase.HConstants;
-065import org.apache.hadoop.hbase.InvalidFamilyOperationException;
-066import org.apache.hadoop.hbase.MasterNotRunningException;
-067import org.apache.hadoop.hbase.MetaTableAccessor;
-068import org.apache.hadoop.hbase.NamespaceDescriptor;
-069import org.apache.hadoop.hbase.PleaseHoldException;
-070import org.apache.hadoop.hbase.ServerLoad;
-071import org.apache.hadoop.hbase.ServerName;
-072import org.apache.hadoop.hbase.TableDescriptors;
-073import org.apache.hadoop.hbase.TableName;
-074import org.apache.hadoop.hbase.TableNotDisabledException;
-075import org.apache.hadoop.hbase.TableNotFoundException;
-076import org.apache.hadoop.hbase.UnknownRegionException;
-077import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-078import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
-079import org.apache.hadoop.hbase.client.MasterSwitchType;
-080import org.apache.hadoop.hbase.client.RegionInfo;
-081import org.apache.hadoop.hbase.client.Result;
-082import org.apache.hadoop.hbase.client.TableDescriptor;
-083import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-084import org.apache.hadoop.hbase.client.TableState;
-085import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-086import org.apache.hadoop.hbase.exceptions.DeserializationException;
-087import org.apache.hadoop.hbase.exceptions.MergeRegionException;
-088import org.apache.hadoop.hbase.executor.ExecutorType;
-089import org.apache.hadoop.hbase.favored.FavoredNodesManager;
-090import org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
-091import org.apache.hadoop.hbase.http.InfoServer;
-092import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
-093import org.apache.hadoop.hbase.ipc.RpcServer;
-094import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-095import org.apache.hadoop.hbase.master.MasterRpcServices.BalanceSwitchMode;
-096import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
-097import org.apache.hadoop.hbase.master.assignment.MergeTableRegionsProcedure;
-098import org.apache.hadoop.hbase.master.assignment.RegionStates;
-099import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
-100import org.apache.hadoop.hbase.master.balancer.BalancerChore;
-101import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer;
-102import org.apache.hadoop.hbase.master.balancer.ClusterStatusChore;
-103import org.apache.hadoop.hbase.master.balancer.LoadBalancerFactory;
-104import org.apache.hadoop.hbase.master.cleaner.HFileCleaner;
-105import org.apache.hadoop.hbase.master.cleaner.LogCleaner;
-106import org.apache.hadoop.hbase.master.cleaner.ReplicationMetaCleaner;
-107import org.apache.hadoop.hbase.master.cleaner.ReplicationZKNodeCleaner;
-108import org.apache.hadoop.hbase.master.cleaner.ReplicationZKNodeCleanerChore;
-109import org.apache.hadoop.hbase.master.locking.LockManager;
-110import org.apache.hadoop.hbase.master.normalizer.NormalizationPlan;
-111import org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType;
-112import org.apache.hadoop.hbase.master.normalizer.RegionNormalizer;
-113import org.apache.hadoop.hbase.master.normalizer.RegionNormalizerChore;
-114import org.apache.hadoop.hbase.master.normalizer.RegionNormalizerFactory;
-115import org.apache.hadoop.hbase.master.procedure.CreateTableProcedure;
-116import org.apache.hadoop.hbase.master.procedure.DeleteTableProcedure;
-117import org.apache.hadoop.hbase.master.procedure.DisableTableProcedure;
-118import org.apache.hadoop.hbase.master.procedure.EnableTableProcedure;
-119import org.apache.hadoop.hbase.master.procedure.Master

[22/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/overview-tree.html
--
diff --git a/devapidocs/overview-tree.html b/devapidocs/overview-tree.html
index 884017d..2833690 100644
--- a/devapidocs/overview-tree.html
+++ b/devapidocs/overview-tree.html
@@ -1129,6 +1129,17 @@
 org.apache.hadoop.hbase.io.hfile.InclusiveCombinedBlockCache (implements org.apache.hadoop.hbase.io.hfile.BlockCache)
+org.apache.hadoop.hbase.util.CommonFSUtils
+org.apache.hadoop.hbase.util.FSUtils
+org.apache.hadoop.hbase.util.FSHDFSUtils
+org.apache.hadoop.hbase.util.FSMapRUtils
+org.apache.hadoop.hbase.util.CommonFSUtils.StreamCapabilities
 org.apache.hadoop.hbase.io.crypto.aes.CommonsCryptoAESDecryptor (implements org.apache.hadoop.hbase.io.crypto.Decryptor)
 org.apache.hadoop.hbase.io.crypto.aes.CommonsCryptoAESEncryptor (implements org.apache.hadoop.hbase.io.crypto.Encryptor)
 org.apache.hadoop.hbase.regionserver.CompactingMemStore.InMemoryFlushRunnable (implements java.lang.Runnable)
@@ -1666,12 +1677,6 @@
 org.apache.hadoop.hbase.regionserver.wal.FSHLog.SafePointZigZagLatch
 org.apache.hadoop.hbase.util.FSRegionScanner (implements java.lang.Runnable)
 org.apache.hadoop.hbase.util.FSTableDescriptors (implements org.apache.hadoop.hbase.TableDescriptors)
-org.apache.hadoop.hbase.util.FSUtils
-org.apache.hadoop.hbase.util.FSHDFSUtils
-org.apache.hadoop.hbase.util.FSMapRUtils
 org.apache.hadoop.hbase.util.FSUtils.HFileLinkFilter (implements org.apache.hadoop.fs.PathFilter)
 org.apache.hadoop.hbase.util.FSVisitor
 java.util.concurrent.FutureTask (implements java.util.concurrent.RunnableFuture)
 title="class or interface in java.util.concurrent">RunnableFuture)
@@ -2837,6 +2842,7 @@
 org.apache.hadoop.hbase.client.RawAsyncTableImpl (implements org.apache.hadoop.hbase.client.RawAsyncTable)
 org.apache.hadoop.hbase.client.RawAsyncTableImpl.CheckAndMutateBuilderImpl (implements org.apache.hadoop.hbase.client.AsyncTableBase.CheckAndMutateBuilder)
+org.apache.hadoop.hbase.client.RawAsyncTableImpl.CoprocessorServiceBuilderImpl (implements org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorServiceBuilder)
 org.apache.hadoop.hbase.types.RawByte (implements org.apache.hadoop.hbase.types.DataType)
 org.apache.hadoop.hbase.types.RawBytes (implements org.apache.hadoop.hbase.types.DataType)
 org.apache.hadoop.hbase.types.RawDouble (implements org.apache.hadoop.hbase.types.DataType)
@@ -3753,6 +3759,7 @@
 java.lang.Exception
+org.apache.hadoop.hbase.util.CommonFSUtils.StreamLacksCapabilityException
 org.apache.hadoop.hbase.http.conf.ConfServlet.BadFormatException
 org.apache.hadoop.hbase.exceptions.HBaseException
@@ -4898,6 +4905,7 @@
 org.apache.hadoop.hbase.client.RawAsyncHBaseAdmin.TableOperator
 org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorCallable
 org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorCallback
+org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorServiceBuilder
 org.apache.hadoop.hbase.client.RawAsyncTableImpl.Converter
 org.apache.hadoop.hbase.client.RawAsyncTableImpl.NoncedConverter
 org.apache.hadoop.hbase.client.RawAsyncTableImpl.RpcCall

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/serialized-form.html
--
diff --git a/devapidocs/serialized-form.html b/devapidocs/serialized-form.html
index 2a79fb9..b791ce9 100644
--- a/devapidocs/serialized-form.html
+++ b/devapidocs/serialized-form.html
@@ -2300,6 +2300,11 @@
 Package org.apache.hadoop.hbase.util
+Class org.apache.hadoop.hbase.util.CommonFSUtils.StreamLacksCapabilityException extends Exception implements Serializable
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/Version.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/Version.html
index 4886d6b..e7c9219 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/Version.

[13/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.AbstractAggregationCallback.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.AbstractAggregationCallback.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.AbstractAggregationCallback.html
index cece735..cb909d9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.AbstractAggregationCallback.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.AbstractAggregationCallback.html
@@ -28,446 +28,458 @@
 020import static org.apache.hadoop.hbase.client.coprocessor.AggregationHelper.getParsedGenericInstance;
 021import static org.apache.hadoop.hbase.client.coprocessor.AggregationHelper.validateArgAndGetPB;
 022
-023import java.io.IOException;
-024import java.util.Map;
-025import java.util.NavigableMap;
-026import java.util.NavigableSet;
-027import java.util.NoSuchElementException;
-028import java.util.TreeMap;
-029import java.util.concurrent.CompletableFuture;
-030
-031import org.apache.hadoop.hbase.Cell;
-032import org.apache.hadoop.hbase.client.RawAsyncTable;
-033import org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorCallback;
-034import org.apache.hadoop.hbase.client.RawScanResultConsumer;
-035import org.apache.hadoop.hbase.client.RegionInfo;
-036import org.apache.hadoop.hbase.client.Result;
-037import org.apache.hadoop.hbase.client.Scan;
-038import org.apache.hadoop.hbase.coprocessor.ColumnInterpreter;
-039import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest;
-040import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse;
-041import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateService;
-042import org.apache.hadoop.hbase.util.Bytes;
-043import org.apache.hadoop.hbase.util.ReflectionUtils;
-044import org.apache.yetus.audience.InterfaceAudience;
-045
-046import com.google.protobuf.Message;
-047
-048/**
-049 * This client class is for invoking the aggregate functions deployed on the Region Server side via
-050 * the AggregateService. This class will implement the supporting functionality for
-051 * summing/processing the individual results obtained from the AggregateService for each region.
-052 */
-053@InterfaceAudience.Public
-054public class AsyncAggregationClient {
-055
-056  private static abstract class AbstractAggregationCallback<T>
-057      implements CoprocessorCallback<AggregateResponse> {
-058
-059    private final CompletableFuture<T> future;
-060
-061    protected boolean finished = false;
-062
-063    private void completeExceptionally(Throwable error) {
-064      if (finished) {
-065        return;
-066      }
-067      finished = true;
-068      future.completeExceptionally(error);
-069    }
-070
-071    protected AbstractAggregationCallback(CompletableFuture<T> future) {
-072      this.future = future;
-073    }
-074
-075    @Override
-076    public synchronized void onRegionError(RegionInfo region, Throwable error) {
-077      completeExceptionally(error);
-078    }
-079
-080    @Override
-081    public synchronized void onError(Throwable error) {
-082      completeExceptionally(error);
-083    }
-084
-085    protected abstract void aggregate(RegionInfo region, AggregateResponse resp)
-086        throws IOException;
-087
-088    @Override
-089    public synchronized void onRegionComplete(RegionInfo region, AggregateResponse resp) {
-090      try {
-091        aggregate(region, resp);
-092      } catch (IOException e) {
-093        completeExceptionally(e);
-094      }
-095    }
-096
-097    protected abstract T getFinalResult();
-098
-099    @Override
-100    public synchronized void onComplete() {
-101      if (finished) {
-102        return;
-103      }
-104      finished = true;
-105      future.complete(getFinalResult());
-106    }
-107  }
-108
-109  private static <R, S, P extends Message, Q extends Message, T extends Message> R
-110      getCellValueFromProto(ColumnInterpreter<R, S, P, Q, T> ci, AggregateResponse resp,
-111          int firstPartIndex) throws IOException {
-112    Q q = getParsedGenericInstance(ci.getClass(), 3, resp.getFirstPart(firstPartIndex));
-113    return ci.getCellValueFromProto(q);
-114  }
-115
-116  private static <R, S, P extends Message, Q extends Message, T extends Message> S
-117      getPromotedValueFromProto(ColumnInterpreter<R, S, P, Q, T> ci, AggregateResponse resp,
-118          int firstPartIndex) throws IOException {
-119    T t = getParsedGenericInstance(ci.getClass(), 4, resp.getFirstPart(firstPartIndex));
-120    return ci.getPromotedValueFromProto(t);
-121  }
-122
-123  public static 
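For orientation, a hedged sketch of driving one of these aggregates end to end. The table name, column, and the getRawTable accessor name are assumptions for this snapshot of the API; only rowCount's signature comes from the pages above:

import java.util.concurrent.CompletableFuture;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncConnection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RawAsyncTable;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.AsyncAggregationClient;
import org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter;
import org.apache.hadoop.hbase.util.Bytes;

public class RowCountExample {
  public static void main(String[] args) throws Exception {
    try (AsyncConnection conn =
        ConnectionFactory.createAsyncConnection(HBaseConfiguration.create()).get()) {
      RawAsyncTable table = conn.getRawTable(TableName.valueOf("t1")); // "t1" is illustrative
      Scan scan = new Scan().addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"));
      // rowCount fans the AggregateService call out per region and sums the results.
      CompletableFuture<Long> count =
          AsyncAggregationClient.rowCount(table, new LongColumnInterpreter(), scan);
      System.out.println("rows: " + count.get());
    }
  }
}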

[05/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.QuotingInputFilter.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.QuotingInputFilter.html b/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.QuotingInputFilter.html
index 9b5e564..5f34a86 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.QuotingInputFilter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.QuotingInputFilter.html
@@ -25,1367 +25,1374 @@
 017 */
 018package org.apache.hadoop.hbase.http;
 019
-020import java.io.FileNotFoundException;
-021import java.io.IOException;
-022import java.io.InterruptedIOException;
-023import java.io.PrintStream;
-024import java.net.BindException;
-025import java.net.InetSocketAddress;
-026import java.net.URI;
-027import java.net.URISyntaxException;
-028import java.net.URL;
-029import java.util.ArrayList;
-030import java.util.Collections;
-031import java.util.Enumeration;
-032import java.util.HashMap;
-033import java.util.List;
-034import java.util.Map;
-035
-036import javax.servlet.Filter;
-037import javax.servlet.FilterChain;
-038import javax.servlet.FilterConfig;
-039import javax.servlet.ServletContext;
-040import javax.servlet.ServletException;
-041import javax.servlet.ServletRequest;
-042import javax.servlet.ServletResponse;
-043import javax.servlet.http.HttpServlet;
-044import javax.servlet.http.HttpServletRequest;
-045import javax.servlet.http.HttpServletRequestWrapper;
-046import javax.servlet.http.HttpServletResponse;
-047
-048import org.apache.commons.logging.Log;
-049import org.apache.commons.logging.LogFactory;
-050import org.apache.hadoop.HadoopIllegalArgumentException;
-051import org.apache.yetus.audience.InterfaceAudience;
-052import org.apache.yetus.audience.InterfaceStability;
-053import org.apache.hadoop.conf.Configuration;
-054import org.apache.hadoop.fs.CommonConfigurationKeys;
-055import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-056import org.apache.hadoop.hbase.http.conf.ConfServlet;
-057import org.apache.hadoop.hbase.http.jmx.JMXJsonServlet;
-058import org.apache.hadoop.hbase.http.log.LogLevel;
-059import org.apache.hadoop.hbase.util.Threads;
-060import org.apache.hadoop.hbase.util.ReflectionUtils;
-061import org.apache.hadoop.security.SecurityUtil;
-062import org.apache.hadoop.security.UserGroupInformation;
-063import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-064import org.apache.hadoop.security.authorize.AccessControlList;
-065import org.apache.hadoop.util.Shell;
-066
-067import org.eclipse.jetty.http.HttpVersion;
-068import org.eclipse.jetty.server.Server;
-069import org.eclipse.jetty.server.Handler;
-070import org.eclipse.jetty.server.HttpConfiguration;
-071import org.eclipse.jetty.server.HttpConnectionFactory;
-072import org.eclipse.jetty.server.ServerConnector;
-073import org.eclipse.jetty.server.SecureRequestCustomizer;
-074import org.eclipse.jetty.server.SslConnectionFactory;
-075import org.eclipse.jetty.server.handler.ContextHandlerCollection;
-076import org.eclipse.jetty.server.handler.HandlerCollection;
-077import org.eclipse.jetty.server.RequestLog;
-078import org.eclipse.jetty.server.handler.RequestLogHandler;
-079import org.eclipse.jetty.servlet.FilterMapping;
-080import org.eclipse.jetty.servlet.ServletHandler;
-081import org.eclipse.jetty.servlet.FilterHolder;
-082import org.eclipse.jetty.servlet.ServletContextHandler;
-083import org.eclipse.jetty.servlet.DefaultServlet;
-084import org.eclipse.jetty.servlet.ServletHolder;
-085import org.eclipse.jetty.util.MultiException;
-086import org.eclipse.jetty.util.ssl.SslContextFactory;
-087import org.eclipse.jetty.util.thread.QueuedThreadPool;
-088import org.eclipse.jetty.webapp.WebAppContext;
-089
-090import org.glassfish.jersey.server.ResourceConfig;
-091import org.glassfish.jersey.servlet.ServletContainer;
-092
-093import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-094import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-095
-096/**
-097 * Create a Jetty embedded server to answer http requests. The primary goal
-098 * is to serve up status information for the server.
-099 * There are three contexts:
-100 *   "/logs/" -> points to the log directory
-101 *   "/static/" -> points to common static files (src/webapps/static)
-102 *   "/" -> the jsp server code from (src/webapps/<name>)
-103 */
-104@InterfaceAudience.Private
-105@InterfaceStability.Evolving
-106public class HttpServer implements FilterContainer {
-107  private static final Log LOG = LogFactory.getLog(HttpServer.class);
-108  private static final String EMPTY_STRING = "";
-109
-110  private static final int DEFAULT_MAX_HEADER_SIZE = 64 * 1024; // 64K
-111
-112  static final String FILTER_INITIALIZ
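The class javadoc above describes an embedded Jetty server with /logs/, /static/ and / contexts. A minimal standalone Jetty 9 sketch of that layout, for orientation only; the port and resource base are illustrative and this is not HBase's HttpServer builder:

import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;

public class EmbeddedStatusServer {
  public static void main(String[] args) throws Exception {
    Server server = new Server(16010); // illustrative info-server port
    ServletContextHandler context = new ServletContextHandler();
    context.setContextPath("/");
    // "/static/*" serves common static files, mirroring the layout above.
    ServletHolder staticHolder = new ServletHolder("static", DefaultServlet.class);
    staticHolder.setInitParameter("resourceBase", "src/webapps/static");
    staticHolder.setInitParameter("pathInfoOnly", "true");
    context.addServlet(staticHolder, "/static/*");
    // "/" would host the status servlets/JSPs in the real server.
    context.addServlet(new ServletHolder(new DefaultServlet()), "/");
    server.setHandler(context);
    server.start();
    server.join();
  }
}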

[39/51] [partial] hbase-site git commit: Published site at .

2017-11-03 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
index e1f5c54..336f38f 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/coprocessor/AsyncAggregationClient.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9};
+var methods = {"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public class AsyncAggregationClient
-public class AsyncAggregationClient
+public class AsyncAggregationClient
 extends Object
 This client class is for invoking the aggregate functions deployed on the Region Server side via
 the AggregateService. This class will implement the supporting functionality for
Scan scan) 
 
 
+private static byte[]
+nullToEmpty(byte[] b) 
+
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureLong>
 rowCount(RawAsyncTable table,
 ColumnInterpreter ci,
 Scan scan) 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureDouble>
 std(RawAsyncTable table,
ColumnInterpreter ci,
Scan scan) 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in 
java.util.concurrent">CompletableFuture
 sum(RawAsyncTable table,
ColumnInterpreter ci,
Scan scan) 
 
-
+
 private static http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html?is-external=true";
 title="class or interface in java.util.concurrent">CompletableFutureNavigableMap>
 sumByRegion(RawAsyncTable table,
ColumnInterpreter ci,
@@ -265,7 +269,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 AsyncAggregationClient
-public AsyncAggregationClient()
+public AsyncAggregationClient()
 
 
 
@@ -282,7 +286,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getCellValueFromProto
-private static R getCellValueFromProto(ColumnInterpreter ci,
+private static R getCellValueFromProto(ColumnInterpreter ci,
                                        org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse resp,
                                        int firstPartIndex)
                                 throws IOException
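The new private nullToEmpty helper listed in the summary has no body in this excerpt. Its likely shape, inferred from the name and byte[] signature, is an assumption rather than the committed code:

import org.apache.hadoop.hbase.HConstants;

final class KeyRanges {
  // Assumed body: normalize a null row-key boundary to the canonical
  // empty byte array so start/stop-row range checks need no null handling.
  static byte[] nullToEmpty(byte[] b) {
    return b != null ? b : HConstants.EMPTY_BYTE_ARRAY;
  }
}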

[35/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
index c88c013..e49706a 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/HMaster.html
@@ -128,7 +128,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
-public class HMaster
+public class HMaster
 extends HRegionServer
 implements MasterServices
 HMaster is the "master server" for HBase. An HBase cluster has one active
@@ -1425,7 +1425,7 @@ implements 
 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
 
 
 
@@ -1434,7 +1434,7 @@ implements 
 
 MASTER
-public static final String MASTER
+public static final String MASTER
 See Also:
 Constant Field Values
@@ -1447,7 +1447,7 @@ implements 
 
 activeMasterManager
-private final ActiveMasterManager activeMasterManager
+private final ActiveMasterManager activeMasterManager
 
 
 
@@ -1456,7 +1456,7 @@ implements 
 
 regionServerTracker
-RegionServerTracker regionServerTracker
+RegionServerTracker regionServerTracker
 
 
 
@@ -1465,7 +1465,7 @@ implements 
 
 drainingServerTracker
-private DrainingServerTracker 
drainingServerTracker
+private DrainingServerTracker 
drainingServerTracker
 
 
 
@@ -1474,7 +1474,7 @@ implements 
 
 loadBalancerTracker
-LoadBalancerTracker loadBalancerTracker
+LoadBalancerTracker loadBalancerTracker
 
 
 
@@ -1483,7 +1483,7 @@ implements 
 
 splitOrMergeTracker
-private SplitOrMergeTracker splitOrMergeTracker
+private SplitOrMergeTracker splitOrMergeTracker
 
 
 
@@ -1492,7 +1492,7 @@ implements 
 
 regionNormalizerTracker
-private RegionNormalizerTracker 
regionNormalizerTracker
+private RegionNormalizerTracker 
regionNormalizerTracker
 
 
 
@@ -1501,7 +1501,7 @@ implements 
 
 maintenanceModeTracker
-private MasterMaintenanceModeTracker maintenanceModeTracker
+private MasterMaintenanceModeTracker maintenanceModeTracker
 
 
 
@@ -1510,7 +1510,7 @@ implements 
 
 clusterSchemaService
-private ClusterSchemaService clusterSchemaService
+private ClusterSchemaService clusterSchemaService
 
 
 
@@ -1519,7 +1519,7 @@ implements 
 
 HBASE_MASTER_WAIT_ON_SERVICE_IN_SECONDS
-public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_MASTER_WAIT_ON_SERVICE_IN_SECONDS
+public static final http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true";
 title="class or interface in java.lang">String HBASE_MASTER_WAIT_ON_SERVICE_IN_SECONDS
 
 See Also:
 Constant
 Field Values
@@ -1532,7 +1532,7 @@ implements 
 
 DEFAULT_HBASE_MASTER_WAIT_ON_SERVICE_IN_SECONDS
-public static final int DEFAULT_HBASE_MASTER_WAIT_ON_SERVICE_IN_SECONDS
+public static final int DEFAULT_HBASE_MASTER_WAIT_ON_SERVICE_IN_SECONDS
 
 See Also:
 Constant
 Field Values
@@ -1545,7 +1545,7 @@ implements 
 
 metricsMaster
-final MetricsMaster metricsMaster
+final MetricsMaster metricsMaster
 
 
 
@@ -1554,7 +1554,7 @@ implements 
 
 fileSystemManager
-private MasterFileSystem fileSystemManager
+private MasterFileSystem fileSystemManager
 
 
 
@@ -1563,7 +1563,7 @@ implements 
 
 walManager
-private MasterWalManager walManager
+private MasterWalManager walManager
 
 
 
@@ -1572,7 +1572,7 @@ implements 
 
 serverManager
-private volatile ServerManager serverManager
+private volatile ServerManager serverManager
 
 
 
@@ -1581,7 +1581,7 @@ implements 
 
 assignmentManager
-private AssignmentManager assignmentManager
+private AssignmentManager assignmentManager
 
 
 
@@ -1590,7 +1590,7 @@ implements 
 
 replicationManager
-private ReplicationManager replicationManager
+private ReplicationManager replicationManager
 
 
 
@@ -1599,7 +1599,7 @@ implements 
 
 rsFatals
-MemoryBoundedLogMessageBuffer rsFatals
+MemoryBoundedLogMessageBuffer rsFatals
 
 
 
@@ -1608,7 +1608,7 @@ implements 
 
 activeMaster
-private volatile boolean activeMaster
+private volatile boolean activeMaster
 
 
 
@@ -1617,7 +1617,7 @@ implements 
 
 initialized
-private final ProcedureEvent initialized
+private final ProcedureEvent initialized
 
 
 
@@ -1626,7 +1626,7 @@ implements 
 
 serviceStarted
-volatile boolean serviceStarted
+volatile boolean serviceStarted
 
 
 
@@ -1635,7 +1635,7 @@ implements 
 
 serverCrashProcessingEnabled
-private final ProcedureEvent serverCrashProcessingEnabled
+private final ProcedureEvent serverCrashProcessingEnabled
 
 
 
@@ -1644,7 +1

[51/51] [partial] hbase-site git commit: Published site at .

Published site at .


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/7d38bdbb
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/7d38bdbb
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/7d38bdbb

Branch: refs/heads/asf-site
Commit: 7d38bdbbfc15ae5ef961ada235be46938187c38e
Parents: d430380
Author: jenkins 
Authored: Fri Nov 3 15:16:40 2017 +
Committer: jenkins 
Committed: Fri Nov 3 15:16:40 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf|  2278 +-
 apidocs/index-all.html  |10 +-
 .../RawAsyncTable.CoprocessorCallable.html  | 4 +-
 .../RawAsyncTable.CoprocessorCallback.html  |14 +-
 .../hadoop/hbase/client/RawAsyncTable.html  |98 +-
 .../RawAsyncTable.CoprocessorCallable.html  |22 +-
 .../RawAsyncTable.CoprocessorCallback.html  |22 +-
 .../RawAsyncTable.CoprocessorCallable.html  |   429 +-
 .../RawAsyncTable.CoprocessorCallback.html  |   429 +-
 .../hadoop/hbase/client/RawAsyncTable.html  |   429 +-
 book.html   | 6 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 29900 -
 checkstyle.rss  |74 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html |10 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 8 +-
 devapidocs/allclasses-frame.html| 4 +
 devapidocs/allclasses-noframe.html  | 4 +
 devapidocs/constant-values.html |92 +-
 devapidocs/index-all.html   |   198 +-
 .../hadoop/hbase/backup/package-tree.html   | 4 +-
 .../hadoop/hbase/class-use/TableName.html   | 4 +-
 .../RawAsyncTable.CoprocessorCallable.html  | 4 +-
 .../RawAsyncTable.CoprocessorCallback.html  |18 +-
 ...RawAsyncTable.CoprocessorServiceBuilder.html |   318 +
 .../hadoop/hbase/client/RawAsyncTable.html  |   105 +-
 ...syncTableImpl.CheckAndMutateBuilderImpl.html |28 +-
 .../client/RawAsyncTableImpl.Converter.html | 8 +-
 ...TableImpl.CoprocessorServiceBuilderImpl.html |   459 +
 .../RawAsyncTableImpl.NoncedConverter.html  | 8 +-
 .../hbase/client/RawAsyncTableImpl.RpcCall.html | 4 +-
 .../hadoop/hbase/client/RawAsyncTableImpl.html  |   167 +-
 .../RawAsyncTable.CoprocessorCallable.html  |64 +-
 .../RawAsyncTable.CoprocessorCallback.html  |56 +-
 ...RawAsyncTable.CoprocessorServiceBuilder.html |   208 +
 ...TableImpl.CoprocessorServiceBuilderImpl.html |   173 +
 ...ationClient.AbstractAggregationCallback.html |22 +-
 .../coprocessor/AsyncAggregationClient.html |47 +-
 .../hadoop/hbase/client/package-frame.html  | 1 +
 .../hadoop/hbase/client/package-summary.html|   159 +-
 .../hadoop/hbase/client/package-tree.html   |28 +-
 .../apache/hadoop/hbase/client/package-use.html | 9 +
 .../hadoop/hbase/executor/package-tree.html | 2 +-
 .../hadoop/hbase/filter/package-tree.html   | 8 +-
 .../hbase/fs/HFileSystem.ReorderBlocks.html | 4 +-
 .../hbase/fs/HFileSystem.ReorderWALBlocks.html  | 6 +-
 .../org/apache/hadoop/hbase/fs/HFileSystem.html |66 +-
 .../hadoop/hbase/http/HttpServer.Builder.html   |94 +-
 .../hbase/http/HttpServer.ListenerInfo.html | 8 +-
 ...Server.QuotingInputFilter.RequestQuoter.html |18 +-
 .../http/HttpServer.QuotingInputFilter.html |14 +-
 .../hbase/http/HttpServer.StackServlet.html | 8 +-
 .../apache/hadoop/hbase/http/HttpServer.html|   207 +-
 .../hbase/io/asyncfs/AsyncFSOutputHelper.html   |10 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 8 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 4 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 4 +-
 .../master/HMaster.InitializationMonitor.html   |20 +-
 .../hbase/master/HMaster.RedirectServlet.html   |12 +-
 .../org/apache/hadoop/hbase/master/HMaster.html |   450 +-
 .../hadoop/hbase/master/package-tree.html   | 6 +-
 .../procedure/MasterProcedureConstants.html |28 +-
 .../hbase/master/procedure/package-tree.html| 2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |16 +-
 .../hadoop/hbase/procedure2/package-tree.html   | 4 +-
 .../wal/WALProcedureStore.LeaseRecovery.html| 4 +-
 .../store/wal/WALProcedureStore.PushType.html   |12 +-
 .../wal/WALProcedureStore.SyncMetrics.html  |24 +-
 .../procedure2/store/wal/W

[23/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/util/class-use/CommonFSUtils.StreamCapabilities.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/class-use/CommonFSUtils.StreamCapabilities.html b/devapidocs/org/apache/hadoop/hbase/util/class-use/CommonFSUtils.StreamCapabilities.html
new file mode 100644
index 000..a533420
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/CommonFSUtils.StreamCapabilities.html
@@ -0,0 +1,125 @@
+Uses of Class org.apache.hadoop.hbase.util.CommonFSUtils.StreamCapabilities (Apache HBase 3.0.0-SNAPSHOT API)
+No usage of org.apache.hadoop.hbase.util.CommonFSUtils.StreamCapabilities

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/util/class-use/CommonFSUtils.StreamLacksCapabilityException.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/class-use/CommonFSUtils.StreamLacksCapabilityException.html b/devapidocs/org/apache/hadoop/hbase/util/class-use/CommonFSUtils.StreamLacksCapabilityException.html
new file mode 100644
index 000..a39f97e
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/util/class-use/CommonFSUtils.StreamLacksCapabilityException.html
@@ -0,0 +1,263 @@
+Uses of Class org.apache.hadoop.hbase.util.CommonFSUtils.StreamLacksCapabilityException (Apache HBase 3.0.0-SNAPSHOT API)
+
+Packages that use CommonFSUtils.StreamLacksCapabilityException
+org.apache.hadoop.hbase.io.asyncfs
+org.apache.hadoop.hbase.regionserver.wal
+org.apache.hadoop.hbase.wal
+
+Uses of CommonFSUtils.StreamLacksCapabilityException in org.apache.hadoop.hbase.io.asyncfs
+Methods in org.apache.hadoop.hbase.io.asyncfs that throw CommonFSUtils.StreamLacksCapabilityException
+static AsyncFSOutput
+AsyncFSOutputHelper.createOutput(org.apache.hadoop.fs.FileSystem fs,
+                                 org.apache.hadoop.fs.Path f,
+                                 boolean overwrite,
+                                 boolean createParent,
+                                 short replication,
+                                 long blockSize,
+                                 org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop eventLoop,
+                                 Class channelClass)
+Create FanOutOneBlockAsyncDFSOutput for DistributedFileSystem, and a simple
+implementation for other FileSystem which wraps around a FSDataOutput

[41/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html
index ebb56d4..2ca67b7 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-class RawAsyncTableImpl
+class RawAsyncTableImpl
 extends Object
 implements RawAsyncTable
 The implementation of RawAsyncTable.
@@ -145,10 +145,14 @@ implements RawAsyncTableImpl.Converter 
 
 
+private class 
+RawAsyncTableImpl.CoprocessorServiceBuilderImpl 
+
+
 private static interface 
 RawAsyncTableImpl.NoncedConverter 
 
-
+
 private static interface 
 RawAsyncTableImpl.RpcCall 
 
@@ -158,7 +162,7 @@ implements RawAsyncTable
-RawAsyncTable.CoprocessorCallable, RawAsyncTable.CoprocessorCallback
+RawAsyncTable.CoprocessorCallable, RawAsyncTable.CoprocessorCallback, RawAsyncTable.CoprocessorServiceBuilder
 
 
 
@@ -305,16 +309,11 @@ implements 
- void
-coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true";
 title="class or interface in 
java.util.function">Function stubMaker,
+ RawAsyncTable.CoprocessorServiceBuilder
+coprocessorService(http://docs.oracle.com/javase/8/docs/api/java/util/function/Function.html?is-external=true";
 title="class or interface in 
java.util.function">Function stubMaker,
   RawAsyncTable.CoprocessorCallable callable,
-  byte[] startKey,
-  boolean startKeyInclusive,
-  byte[] endKey,
-  boolean endKeyInclusive,
   RawAsyncTable.CoprocessorCallback callback)
-Execute the given coprocessor call on the regions which are covered by the range from startKey and endKey.
+Execute a coprocessor call on the regions which are covered by a range.
 
 
 
@@ -513,13 +512,6 @@ implements http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#clone--";
 title="class or interface in java.lang">clone, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#equals-java.lang.Object-";
 title="class or interface in java.lang">equals, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#finalize--";
 title="class or interface in java.lang">finalize, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#getClass--";
 title="class or interface in java.lang">getClass, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#hashCode--";
 title="class or interface in java.lang">hashCode, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#notify--";
 title="class or interface in java.lang">notify, http://docs.oracle.com/javase/8/docs/api/java/lang
 /Object.html?is-external=true#notifyAll--" title="class or interface in 
java.lang">notifyAll, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#toString--";
 title="class or interface in java.lang">toString, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait--";
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-";
 title="class or interface in java.lang">wait, http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true#wait-long-int-";
 title="class or interface in java.lang">wait
 
 
-
-
-
-Methods inherited from interface org.apache.hadoop.hbase.client.RawAsyncTable
-coprocessorService
-
-
 
 
 
@@ -546,7 +538,7 @@ implements 
 
 conn
-private final AsyncConnectionImpl conn
+private final AsyncConnectionImpl conn
 
 
 
@@ -555,7 +547,7 @@ implements 
 
 tableName
-private final TableName tableName
+private final TableName tableName
 
 
 
@@ -564,7 +556,7 @@ implements 
 
 defaultScannerCaching
-private final int defaultScannerCaching
+private final int defaultScannerCaching
 
 
 
@@ -573,7 +565,7 @@ implements 
 
 defaultScannerMaxResultSize
-private final long defaultScannerMaxResultSize
+private final long defaultScannerMaxResultSize
 
 
 
@@ -582,7 +574,7 @@ implements 
 
 rpcTimeoutNs
-private final long rpcTimeoutNs
+private final long rpcTimeoutNs
 
 
 
@@ -591,7 +583,7 @@ implements 
 
 readRpcTimeoutNs
-private final 
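The summary above replaces the old multi-argument coprocessorService overload with a CoprocessorServiceBuilder. A hedged sketch of what migrating a call could look like; fromRow/toRow/execute are assumptions inferred from the builder pattern, not confirmed by this excerpt:

import java.util.function.Function;
import org.apache.hadoop.hbase.client.RawAsyncTable;

// Hypothetical migration helper: S is the service stub type, R the response type.
final class CoprocessorRangeCall {
  static <S, R> void callOverRange(RawAsyncTable table,
      Function<com.google.protobuf.RpcChannel, S> stubMaker,
      RawAsyncTable.CoprocessorCallable<S, R> callable,
      RawAsyncTable.CoprocessorCallback<R> callback,
      byte[] startKey, byte[] endKey) {
    table.coprocessorService(stubMaker, callable, callback)
        .fromRow(startKey, true)   // was: startKey + startKeyInclusive arguments
        .toRow(endKey, false)      // was: endKey + endKeyInclusive arguments
        .execute();                // assumed terminal method kicking off per-region calls
  }
}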

[16/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.NoncedConverter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.NoncedConverter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.NoncedConverter.html
index 0d33cae..19fa457 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.NoncedConverter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.NoncedConverter.html
@@ -26,564 +26,607 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static java.util.stream.Collectors.toList;
-021import static org.apache.hadoop.hbase.HConstants.EMPTY_END_ROW;
-022import static org.apache.hadoop.hbase.HConstants.EMPTY_START_ROW;
-023import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
-024import static org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+021import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
+022import static org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+023
+024import com.google.protobuf.RpcChannel;
 025
-026import com.google.protobuf.RpcChannel;
-027
-028import java.io.IOException;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.List;
-032import java.util.Optional;
-033import java.util.concurrent.CompletableFuture;
-034import java.util.concurrent.TimeUnit;
-035import java.util.concurrent.atomic.AtomicBoolean;
-036import java.util.concurrent.atomic.AtomicInteger;
-037import java.util.function.Function;
-038
-039import org.apache.hadoop.conf.Configuration;
-040import org.apache.hadoop.hbase.CompareOperator;
-041import org.apache.hadoop.hbase.HRegionLocation;
-042import org.apache.hadoop.hbase.TableName;
-043import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
-044import org.apache.hadoop.hbase.filter.BinaryComparator;
-045import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-046import org.apache.hadoop.hbase.util.Bytes;
-047import org.apache.hadoop.hbase.util.ReflectionUtils;
-048import org.apache.yetus.audience.InterfaceAudience;
-049
-050import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-051import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-052import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-053import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-054import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
-055import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
-056import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest;
-057import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse;
-058import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
-059import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse;
-060import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
-061import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
-062import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
-063import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
-064
-065/**
-066 * The implementation of RawAsyncTable.
-067 */
-068@InterfaceAudience.Private
-069class RawAsyncTableImpl implements RawAsyncTable {
+026import java.io.IOException;
+027import java.util.ArrayList;
+028import java.util.Arrays;
+029import java.util.List;
+030import java.util.concurrent.CompletableFuture;
+031import java.util.concurrent.TimeUnit;
+032import java.util.concurrent.atomic.AtomicBoolean;
+033import java.util.concurrent.atomic.AtomicInteger;
+034import java.util.function.Function;
+035
+036import org.apache.hadoop.conf.Configuration;
+037import org.apache.hadoop.hbase.CompareOperator;
+038import org.apache.hadoop.hbase.HConstants;
+039import org.apache.hadoop.hbase.HRegionLocation;
+040import org.apache.hadoop.hbase.TableName;
+041import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
+042import org.apache.hadoop.hbase.filter.BinaryComparator;
+043import org.apache.hadoop.hbase.ipc.HBaseRpcController;
+044import org.apache.hadoop.hbase.util.Bytes;
+045import org.apache.hadoop.hbase.util.ReflectionUtils;
+046import org.apache.yetus.audience.InterfaceAudience;
+047
+048import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+049import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
+050import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+051import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+052import org.apache.hadoop.hbase.shad

[32/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.LeaseRecovery.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.LeaseRecovery.html
 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.LeaseRecovery.html
index bca87cd..3382458 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.LeaseRecovery.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/procedure2/store/wal/class-use/WALProcedureStore.LeaseRecovery.html
@@ -137,16 +137,13 @@
 
 
 
-WALProcedureStore(org.apache.hadoop.conf.Configuration conf,
- org.apache.hadoop.fs.FileSystem fs,
+WALProcedureStore(org.apache.hadoop.conf.Configuration conf,
  org.apache.hadoop.fs.Path walDir,
  org.apache.hadoop.fs.Path walArchiveDir,
  WALProcedureStore.LeaseRecovery leaseRecovery) 
 
 
-WALProcedureStore(org.apache.hadoop.conf.Configuration conf,
- org.apache.hadoop.fs.FileSystem fs,
- org.apache.hadoop.fs.Path walDir,
+WALProcedureStore(org.apache.hadoop.conf.Configuration conf,
  WALProcedureStore.LeaseRecovery leaseRecovery) 
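[Editor's note: a minimal caller-side sketch of the slimmed constructor above. The FileSystem parameter is gone, so the store now derives the filesystem from walDir plus the Configuration. The wal path and the no-op lease recovery are assumptions for illustration, not taken from this commit.]

  Configuration conf = HBaseConfiguration.create();
  Path walDir = new Path("/hbase/MasterProcWALs");  // hypothetical location
  WALProcedureStore store = new WALProcedureStore(conf, walDir,
      null /* walArchiveDir */, new WALProcedureStore.LeaseRecovery() {
        @Override
        public void recoverFileLease(FileSystem fs, Path path) throws IOException {
          // no-op lease recovery: acceptable only for a single-master test rig
        }
      });
  store.start(1);  // ProcedureStore.start(numThreads)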
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index fb637c6..becb827 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -206,11 +206,11 @@
 
java.lang.Enum (implements java.lang.Comparable, java.io.Serializable)
 
-org.apache.hadoop.hbase.quotas.QuotaScope
 org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
 org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
-org.apache.hadoop.hbase.quotas.ThrottleType
 org.apache.hadoop.hbase.quotas.QuotaType
+org.apache.hadoop.hbase.quotas.QuotaScope
+org.apache.hadoop.hbase.quotas.ThrottleType
 org.apache.hadoop.hbase.quotas.ThrottlingException.Type
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index 28f6269..175cbcb 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -702,19 +702,19 @@
 
java.lang.Enum (implements java.lang.Comparable, java.io.Serializable)
 
-org.apache.hadoop.hbase.regionserver.HRegion.FlushResult.Result
-org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
-org.apache.hadoop.hbase.regionserver.ScanType
-org.apache.hadoop.hbase.regionserver.Region.Operation
 org.apache.hadoop.hbase.regionserver.FlushType
+org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
+org.apache.hadoop.hbase.regionserver.Region.Operation
 org.apache.hadoop.hbase.regionserver.CompactingMemStore.IndexType
+org.apache.hadoop.hbase.regionserver.ScanType
+org.apache.hadoop.hbase.regionserver.TimeRangeTracker.Type
 org.apache.hadoop.hbase.regionserver.ScannerContext.NextState
+org.apache.hadoop.hbase.regionserver.MemStoreCompactionStrategy.Action
 org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
-org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
-org.apache.hadoop.hbase.regionserver.TimeRangeTracker.Type
-org.apache.hadoop.hbase.regionserver.BloomType
 org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
-org.apache.hadoop.hbase.regionserver.MemStoreCompactionStrategy.Action
+org.apache.hadoop.hbase.regionserver.HRegion.FlushResult.Result
+org.apache.hadoop.hbase.regionserver.Default

[43/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
index 970bda5..fa4dba6 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/package-tree.html
@@ -167,10 +167,10 @@
 
java.lang.Enum (implements java.lang.Comparable, java.io.Serializable)
 
-org.apache.hadoop.hbase.backup.BackupInfo.BackupState
-org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand
 org.apache.hadoop.hbase.backup.BackupType
 org.apache.hadoop.hbase.backup.BackupInfo.BackupPhase
+org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand
+org.apache.hadoop.hbase.backup.BackupInfo.BackupState
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html 
b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
index a42bd7e..38264a7 100644
--- a/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
+++ b/devapidocs/org/apache/hadoop/hbase/class-use/TableName.html
@@ -10288,7 +10288,7 @@ service.
 
 
 static TableName
-FSUtils.getTableName(org.apache.hadoop.fs.Path tablePath)
+CommonFSUtils.getTableName(org.apache.hadoop.fs.Path tablePath)
 Returns the TableName object representing
  the table directory under
  path rootdir
@@ -10431,7 +10431,7 @@ service.
 
 
 static org.apache.hadoop.fs.Path
-FSUtils.getTableDir(org.apache.hadoop.fs.Path rootdir,
+CommonFSUtils.getTableDir(org.apache.hadoop.fs.Path rootdir,
TableName tableName)
 Returns the Path object representing the table 
directory under
  path rootdir
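[Editor's note: the same pattern repeats throughout this page — static path helpers formerly on FSUtils now resolve against CommonFSUtils. A short sketch, with an illustrative table name; HConstants.HBASE_DIR is the "hbase.rootdir" key.]

  Configuration conf = HBaseConfiguration.create();
  Path rootDir = new Path(conf.get(HConstants.HBASE_DIR));
  Path tableDir = CommonFSUtils.getTableDir(rootDir, TableName.valueOf("t1"));
  TableName name = CommonFSUtils.getTableName(tableDir);  // round-trips to "t1"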

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallable.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallable.html
index 25f4cd7..6b66fc2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallable.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallable.html
@@ -111,7 +111,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Public
@FunctionalInterface
-public static interface RawAsyncTable.CoprocessorCallable
+public static interface RawAsyncTable.CoprocessorCallable
 Delegate to a protobuf rpc call.
  
  Usually, it is just a simple lambda expression, like:
@@ -182,7 +182,7 @@ public static interface 
 
 call
-void call(S stub,
+void call(S stub,
   com.google.protobuf.RpcController controller,
   com.google.protobuf.RpcCallback rpcCallback)
 Represent the actual protobuf rpc call.

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
index fcaea10..115b3d0 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
@@ -110,7 +110,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Public
-public static interface RawAsyncTable.CoprocessorCallback
+public static interface RawAsyncTable.CoprocessorCallback
 The callback when we want to execute a coprocessor call on 
a range of regions.
  
  As the locating itself also takes some time, the implementation may want to 
send rpc calls on
@@ -118,8 +118,8 @@ public static interface onComplete()
 which is used to tell you that we have
  passed all the return values to you(through the onRegionComplete(RegionInfo,
 Object)
  or 
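[Editor's note: a hedged sketch of implementing the callback contract described above. The method set (onRegionComplete/onRegionError/onComplete/onError) follows this snapshot's javadoc; the aggregation logic and AtomicLong usage are illustrative only.]

  RawAsyncTable.CoprocessorCallback<Long> callback =
      new RawAsyncTable.CoprocessorCallback<Long>() {
        private final AtomicLong total = new AtomicLong();

        @Override
        public void onRegionComplete(RegionInfo region, Long resp) {
          total.addAndGet(resp);  // one return value per region
        }

        @Override
        public void onRegionError(RegionInfo region, Throwable error) {
          // a single region failed; results from other regions still arrive
        }

        @Override
        public void onComplete() {
          System.out.println("sum over all regions = " + total.get());
        }

        @Override
        public void onError(Throwable error) {
          error.printStackTrace();  // the whole operation failed
        }
      };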

[02/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
index 676c748..c6e457f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/asyncfs/AsyncFSOutputHelper.html
@@ -46,136 +46,145 @@
 038import 
org.apache.yetus.audience.InterfaceAudience;
 039import 
org.apache.hadoop.hbase.io.ByteArrayOutputStream;
 040import 
org.apache.hadoop.hbase.util.CancelableProgressable;
-041import 
org.apache.hadoop.hdfs.DistributedFileSystem;
-042import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-043
-044/**
-045 * Helper class for creating 
AsyncFSOutput.
-046 */
-047@InterfaceAudience.Private
-048public final class AsyncFSOutputHelper 
{
-049
-050  private AsyncFSOutputHelper() {
-051  }
-052
-053  /**
-054   * Create {@link 
FanOutOneBlockAsyncDFSOutput} for {@link DistributedFileSystem}, and a simple
-055   * implementation for other {@link 
FileSystem} which wraps around a {@link FSDataOutputStream}.
-056   */
-057  public static AsyncFSOutput 
createOutput(FileSystem fs, Path f, boolean overwrite,
-058  boolean createParent, short 
replication, long blockSize, EventLoop eventLoop,
-059  Class 
channelClass) throws IOException {
-060if (fs instanceof 
DistributedFileSystem) {
-061  return 
FanOutOneBlockAsyncDFSOutputHelper.createOutput((DistributedFileSystem) fs, 
f,
-062overwrite, createParent, 
replication, blockSize, eventLoop, channelClass);
-063}
-064final FSDataOutputStream fsOut;
-065int bufferSize = 
fs.getConf().getInt(CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY,
-066  
CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT);
-067if (createParent) {
-068  fsOut = fs.create(f, overwrite, 
bufferSize, replication, blockSize, null);
-069} else {
-070  fsOut = fs.createNonRecursive(f, 
overwrite, bufferSize, replication, blockSize, null);
-071}
-072final ExecutorService flushExecutor 
=
-073
Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setDaemon(true)
-074
.setNameFormat("AsyncFSOutputFlusher-" + f.toString().replace("%", 
"%%")).build());
-075return new AsyncFSOutput() {
-076
-077  private final ByteArrayOutputStream 
out = new ByteArrayOutputStream();
-078
-079  @Override
-080  public void write(final byte[] b, 
final int off, final int len) {
-081if (eventLoop.inEventLoop()) {
-082  out.write(b, off, len);
-083} else {
-084  eventLoop.submit(() -> 
out.write(b, off, len)).syncUninterruptibly();
-085}
-086  }
+041import 
org.apache.hadoop.hbase.util.CommonFSUtils;
+042import 
org.apache.hadoop.hdfs.DistributedFileSystem;
+043import 
org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+044
+045/**
+046 * Helper class for creating 
AsyncFSOutput.
+047 */
+048@InterfaceAudience.Private
+049public final class AsyncFSOutputHelper 
{
+050
+051  private AsyncFSOutputHelper() {
+052  }
+053
+054  /**
+055   * Create {@link 
FanOutOneBlockAsyncDFSOutput} for {@link DistributedFileSystem}, and a simple
+056   * implementation for other {@link 
FileSystem} which wraps around a {@link FSDataOutputStream}.
+057   */
+058  public static AsyncFSOutput 
createOutput(FileSystem fs, Path f, boolean overwrite,
+059  boolean createParent, short 
replication, long blockSize, EventLoop eventLoop,
+060  Class 
channelClass)
+061  throws IOException, 
CommonFSUtils.StreamLacksCapabilityException {
+062if (fs instanceof 
DistributedFileSystem) {
+063  return 
FanOutOneBlockAsyncDFSOutputHelper.createOutput((DistributedFileSystem) fs, 
f,
+064overwrite, createParent, 
replication, blockSize, eventLoop, channelClass);
+065}
+066final FSDataOutputStream fsOut;
+067int bufferSize = 
fs.getConf().getInt(CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY,
+068  
CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT);
+069if (createParent) {
+070  fsOut = fs.create(f, overwrite, 
bufferSize, replication, blockSize, null);
+071} else {
+072  fsOut = fs.createNonRecursive(f, 
overwrite, bufferSize, replication, blockSize, null);
+073}
+074// After we create the stream but 
before we attempt to use it at all
+075// ensure that we can provide the 
level of data safety we're configured
+076// to provide.
+077if 
(!(CommonFSUtils.hasCapability(fsOut, "hflush") &&
+078
CommonFSUtils.hasCapability(fsOut, "hsync"))) {
+079  throw new 
CommonFSUtils.StreamLacksCapabilityException("hflush and hsync");
+080}
+081fina
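[Editor's note: for context, a sketch of the capability guard this hunk introduces, applied to an arbitrary stream. Only hasCapability and StreamLacksCapabilityException come from the diff above; the file path and write are illustrative.]

  FSDataOutputStream out = fs.create(new Path("/tmp/example"), true);
  // Refuse a stream that cannot persist data on hflush/hsync, mirroring
  // the check added to createOutput above.
  if (!(CommonFSUtils.hasCapability(out, "hflush")
      && CommonFSUtils.hasCapability(out, "hsync"))) {
    throw new CommonFSUtils.StreamLacksCapabilityException("hflush and hsync");
  }
  out.write(Bytes.toBytes("payload"));
  out.hsync();  // safe: capability was verified up front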

[24/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/util/FSUtils.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.html 
b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.html
index a0f691a..b60b175 100644
--- a/devapidocs/org/apache/hadoop/hbase/util/FSUtils.html
+++ b/devapidocs/org/apache/hadoop/hbase/util/FSUtils.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":9,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":9,"i47":9,"i48":9,"i49":9,"i50":9,"i51":9,"i52":9,"i53":9,"i54":9,"i55":9,"i56":9,"i57":9,"i58":9,"i59":9,"i60":9,"i61":9,"i62":9,"i63":9,"i64":9,"i65":9,"i66":9,"i67":9,"i68":6,"i69":9,"i70":9,"i71":9,"i72":9,"i73":9,"i74":9,"i75":9,"i76":9,"i77":9,"i78":9,"i79":9,"i80":9,"i81":9,"i82":9,"i83":9,"i84":9};
+var methods = 
{"i0":9,"i1":9,"i2":9,"i3":9,"i4":9,"i5":9,"i6":9,"i7":9,"i8":9,"i9":9,"i10":9,"i11":9,"i12":9,"i13":9,"i14":9,"i15":9,"i16":9,"i17":9,"i18":9,"i19":9,"i20":9,"i21":9,"i22":9,"i23":9,"i24":9,"i25":9,"i26":9,"i27":9,"i28":9,"i29":9,"i30":9,"i31":9,"i32":9,"i33":9,"i34":9,"i35":9,"i36":9,"i37":9,"i38":9,"i39":9,"i40":6,"i41":9,"i42":9,"i43":9,"i44":9,"i45":9,"i46":9,"i47":9,"i48":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static 
Methods"],2:["t2","Instance Methods"],4:["t3","Abstract 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -100,10 +100,15 @@ var activeTableTab = "activeTableTab";
java.lang.Object
 
 
+org.apache.hadoop.hbase.util.CommonFSUtils
+
+
 org.apache.hadoop.hbase.util.FSUtils
 
 
 
+
+
 
 
 
@@ -114,8 +119,8 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public abstract class FSUtils
-extends java.lang.Object
+public abstract class FSUtils
+extends CommonFSUtils
 Utility methods for interacting with the underlying file 
system.
 
 
@@ -188,6 +193,13 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
+
+
+
+
+Nested classes/interfaces inherited from 
class org.apache.hadoop.hbase.util.CommonFSUtils
+CommonFSUtils.StreamLacksCapabilityException
+
 
 
 
@@ -207,23 +219,13 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 DEFAULT_THREAD_POOLSIZE 
 
 
-static java.lang.String
-FULL_RWX_PERMISSIONS
-Full access permissions (starting point for a umask)
-
-
-
 private static 
org.apache.commons.logging.Log
 LOG 
 
-
+
private static java.lang.String
 THREAD_POOLSIZE 
 
-
-private static java.util.Map<FileSystem, Boolean>
-warningMap 
-
 
 static boolean
 WINDOWS
@@ -231,6 +233,13 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
+
+
+
+
+Fields inherited from class org.apache.hadoop.hbase.util.CommonFSUtils
+FULL_RWX_PERMISSIONS,
 HBASE_WAL_DIR
+
 
 
 
@@ -348,130 +357,60 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-static 
org.apache.hadoop.fs.FSDataOutputStream
-create(org.apache.hadoop.fs.FileSystem fs,
-  org.apache.hadoop.fs.Path path,
-  org.apache.hadoop.fs.permission.FsPermission perm,
-  boolean overwrite)
-Create the specified file on the filesystem.
-
-
-
-static boolean
-delete(org.apache.hadoop.fs.FileSystem fs,
-  org.apache.hadoop.fs.Path path,
-  boolean recursive)
-Calls fs.delete() and returns the value returned by the 
fs.delete()
-
-
-
-static boolean
-deleteDirectory(org.apache.hadoop.fs.FileSystem fs,
-   org.apache.hadoop.fs.Path dir)
-Delete if exists.
-
-
-
 static boolean
 deleteRegionDir(org.apache.hadoop.conf.Configuration conf,
HRegionInfo hri)
 Delete the region directory if exists.
 
 
-
+
static java.util.List<FileStatus>
 filterFileStatuses(org.apache.hadoop.fs.FileStatus[] input,
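[Editor's note: callers of the removed create/delete/deleteDirectory methods keep working if they switch the receiver to CommonFSUtils, which FSUtils now extends. A hedged sketch with a hypothetical path; deleteDirectory is "delete if exists", so false just means the directory was absent.]

  FileSystem fs = FileSystem.get(conf);
  Path tmpDir = new Path("/hbase/.tmp/example");
  if (!CommonFSUtils.deleteDirectory(fs, tmpDir)) {
    LOG.debug("nothing to clean under " + tmpDir);
  }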
  

[15/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.RpcCall.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.RpcCall.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.RpcCall.html
index 0d33cae..19fa457 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.RpcCall.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.RpcCall.html
@@ -26,564 +26,607 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
java.util.stream.Collectors.toList;
-021import static 
org.apache.hadoop.hbase.HConstants.EMPTY_END_ROW;
-022import static 
org.apache.hadoop.hbase.HConstants.EMPTY_START_ROW;
-023import static 
org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
-024import static 
org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+021import static 
org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
+022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+023
+024import com.google.protobuf.RpcChannel;
 025
-026import com.google.protobuf.RpcChannel;
-027
-028import java.io.IOException;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.List;
-032import java.util.Optional;
-033import 
java.util.concurrent.CompletableFuture;
-034import java.util.concurrent.TimeUnit;
-035import 
java.util.concurrent.atomic.AtomicBoolean;
-036import 
java.util.concurrent.atomic.AtomicInteger;
-037import java.util.function.Function;
-038
-039import 
org.apache.hadoop.conf.Configuration;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.HRegionLocation;
-042import 
org.apache.hadoop.hbase.TableName;
-043import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
-044import 
org.apache.hadoop.hbase.filter.BinaryComparator;
-045import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-046import 
org.apache.hadoop.hbase.util.Bytes;
-047import 
org.apache.hadoop.hbase.util.ReflectionUtils;
-048import 
org.apache.yetus.audience.InterfaceAudience;
-049
-050import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-051import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-052import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-053import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-054import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
-055import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
-056import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest;
-057import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse;
-058import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
-059import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse;
-060import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
-061import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
-062import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
-064
-065/**
-066 * The implementation of RawAsyncTable.
-067 */
-068@InterfaceAudience.Private
-069class RawAsyncTableImpl implements 
RawAsyncTable {
+026import java.io.IOException;
+027import java.util.ArrayList;
+028import java.util.Arrays;
+029import java.util.List;
+030import 
java.util.concurrent.CompletableFuture;
+031import java.util.concurrent.TimeUnit;
+032import 
java.util.concurrent.atomic.AtomicBoolean;
+033import 
java.util.concurrent.atomic.AtomicInteger;
+034import java.util.function.Function;
+035
+036import 
org.apache.hadoop.conf.Configuration;
+037import 
org.apache.hadoop.hbase.CompareOperator;
+038import 
org.apache.hadoop.hbase.HConstants;
+039import 
org.apache.hadoop.hbase.HRegionLocation;
+040import 
org.apache.hadoop.hbase.TableName;
+041import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
+042import 
org.apache.hadoop.hbase.filter.BinaryComparator;
+043import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+044import 
org.apache.hadoop.hbase.util.Bytes;
+045import 
org.apache.hadoop.hbase.util.ReflectionUtils;
+046import 
org.apache.yetus.audience.InterfaceAudience;
+047
+048import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+049import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
+050import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+051import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+052import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
+053impor

[18/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.Converter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.Converter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.Converter.html
index 0d33cae..19fa457 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.Converter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.Converter.html
@@ -26,564 +26,607 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static 
java.util.stream.Collectors.toList;
-021import static 
org.apache.hadoop.hbase.HConstants.EMPTY_END_ROW;
-022import static 
org.apache.hadoop.hbase.HConstants.EMPTY_START_ROW;
-023import static 
org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
-024import static 
org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+021import static 
org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
+022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+023
+024import com.google.protobuf.RpcChannel;
 025
-026import com.google.protobuf.RpcChannel;
-027
-028import java.io.IOException;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.List;
-032import java.util.Optional;
-033import 
java.util.concurrent.CompletableFuture;
-034import java.util.concurrent.TimeUnit;
-035import 
java.util.concurrent.atomic.AtomicBoolean;
-036import 
java.util.concurrent.atomic.AtomicInteger;
-037import java.util.function.Function;
-038
-039import 
org.apache.hadoop.conf.Configuration;
-040import 
org.apache.hadoop.hbase.CompareOperator;
-041import 
org.apache.hadoop.hbase.HRegionLocation;
-042import 
org.apache.hadoop.hbase.TableName;
-043import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
-044import 
org.apache.hadoop.hbase.filter.BinaryComparator;
-045import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-046import 
org.apache.hadoop.hbase.util.Bytes;
-047import 
org.apache.hadoop.hbase.util.ReflectionUtils;
-048import 
org.apache.yetus.audience.InterfaceAudience;
-049
-050import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-051import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-052import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-053import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-054import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
-055import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
-056import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest;
-057import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse;
-058import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
-059import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse;
-060import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
-061import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
-062import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
-063import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
-064
-065/**
-066 * The implementation of RawAsyncTable.
-067 */
-068@InterfaceAudience.Private
-069class RawAsyncTableImpl implements 
RawAsyncTable {
+026import java.io.IOException;
+027import java.util.ArrayList;
+028import java.util.Arrays;
+029import java.util.List;
+030import 
java.util.concurrent.CompletableFuture;
+031import java.util.concurrent.TimeUnit;
+032import 
java.util.concurrent.atomic.AtomicBoolean;
+033import 
java.util.concurrent.atomic.AtomicInteger;
+034import java.util.function.Function;
+035
+036import 
org.apache.hadoop.conf.Configuration;
+037import 
org.apache.hadoop.hbase.CompareOperator;
+038import 
org.apache.hadoop.hbase.HConstants;
+039import 
org.apache.hadoop.hbase.HRegionLocation;
+040import 
org.apache.hadoop.hbase.TableName;
+041import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
+042import 
org.apache.hadoop.hbase.filter.BinaryComparator;
+043import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+044import 
org.apache.hadoop.hbase.util.Bytes;
+045import 
org.apache.hadoop.hbase.util.ReflectionUtils;
+046import 
org.apache.yetus.audience.InterfaceAudience;
+047
+048import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+049import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
+050import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+051import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+052import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;

[10/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
index 1553cc6..1daa9e8 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderWALBlocks.html
@@ -73,434 +73,431 @@
 065public class HFileSystem extends 
FilterFileSystem {
 066  public static final Log LOG = 
LogFactory.getLog(HFileSystem.class);
 067
-068  /** Parameter name for HBase WAL 
directory */
-069  public static final String 
HBASE_WAL_DIR = "hbase.wal.dir";
-070
-071  private final FileSystem noChecksumFs;  
 // read hfile data from storage
-072  private final boolean 
useHBaseChecksum;
-073  private static volatile byte 
unspecifiedStoragePolicyId = Byte.MIN_VALUE;
-074
-075  /**
-076   * Create a FileSystem object for HBase 
regionservers.
-077   * @param conf The configuration to be 
used for the filesystem
-078   * @param useHBaseChecksum if true, 
then use
-079   *checksum verfication in 
hbase, otherwise
-080   *delegate checksum 
verification to the FileSystem.
-081   */
-082  public HFileSystem(Configuration conf, 
boolean useHBaseChecksum)
-083throws IOException {
-084
-085// Create the default filesystem with 
checksum verification switched on.
-086// By default, any operation to this 
FilterFileSystem occurs on
-087// the underlying filesystem that has 
checksums switched on.
-088this.fs = FileSystem.get(conf);
-089this.useHBaseChecksum = 
useHBaseChecksum;
-090
-091fs.initialize(getDefaultUri(conf), 
conf);
-092
-093// disable checksum verification for 
local fileSystem, see HBASE-11218
-094if (fs instanceof LocalFileSystem) 
{
-095  fs.setWriteChecksum(false);
-096  fs.setVerifyChecksum(false);
-097}
-098
-099addLocationsOrderInterceptor(conf);
-100
-101// If hbase checksum verification is 
switched on, then create a new
-102// filesystem object that has cksum 
verification turned off.
-103// We will avoid verifying checksums 
in the fs client, instead do it
-104// inside of hbase.
-105// If this is the local file system 
hadoop has a bug where seeks
-106// do not go to the correct location 
if setVerifyChecksum(false) is called.
-107// This manifests itself in that 
incorrect data is read and HFileBlocks won't be able to read
-108// their header magic numbers. See 
HBASE-5885
-109if (useHBaseChecksum && !(fs 
instanceof LocalFileSystem)) {
-110  conf = new Configuration(conf);
-111  
conf.setBoolean("dfs.client.read.shortcircuit.skip.checksum", true);
-112  this.noChecksumFs = 
maybeWrapFileSystem(newInstanceFileSystem(conf), conf);
-113  
this.noChecksumFs.setVerifyChecksum(false);
-114} else {
-115  this.noChecksumFs = 
maybeWrapFileSystem(fs, conf);
-116}
+068  private final FileSystem noChecksumFs;  
 // read hfile data from storage
+069  private final boolean 
useHBaseChecksum;
+070  private static volatile byte 
unspecifiedStoragePolicyId = Byte.MIN_VALUE;
+071
+072  /**
+073   * Create a FileSystem object for HBase 
regionservers.
+074   * @param conf The configuration to be 
used for the filesystem
+075   * @param useHBaseChecksum if true, 
then use
+076   *checksum verfication in 
hbase, otherwise
+077   *delegate checksum 
verification to the FileSystem.
+078   */
+079  public HFileSystem(Configuration conf, 
boolean useHBaseChecksum)
+080throws IOException {
+081
+082// Create the default filesystem with 
checksum verification switched on.
+083// By default, any operation to this 
FilterFileSystem occurs on
+084// the underlying filesystem that has 
checksums switched on.
+085this.fs = FileSystem.get(conf);
+086this.useHBaseChecksum = 
useHBaseChecksum;
+087
+088fs.initialize(getDefaultUri(conf), 
conf);
+089
+090// disable checksum verification for 
local fileSystem, see HBASE-11218
+091if (fs instanceof LocalFileSystem) 
{
+092  fs.setWriteChecksum(false);
+093  fs.setVerifyChecksum(false);
+094}
+095
+096addLocationsOrderInterceptor(conf);
+097
+098// If hbase checksum verification is 
switched on, then create a new
+099// filesystem object that has cksum 
verification turned off.
+100// We will avoid verifying checksums 
in the fs client, instead do it
+101// inside of hbase.
+102// If this is the local file system 
hadoop has a bug where seeks
+103// do not go to the correct location 
if setVerifyChecksum(false) is called.
+104// This manifests itself in that 
incorrect data is read and HFileBlocks won't be able to read
+105// the
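[Editor's note: a sketch of how the two checksum views set up by this constructor are typically obtained; construction only, error handling elided.]

  Configuration conf = HBaseConfiguration.create();
  // true => HBase verifies its own checksums, so hfile data is read through
  // a second FileSystem object with client-side verification switched off.
  HFileSystem hfs = new HFileSystem(conf, true /* useHBaseChecksum */);
  FileSystem dataFs = hfs.getNoChecksumFs();  // for hfile block reads
  FileSystem metaFs = hfs;                    // everything else keeps fs checksums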

[17/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CoprocessorServiceBuilderImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CoprocessorServiceBuilderImpl.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CoprocessorServiceBuilderImpl.html
new file mode 100644
index 000..19fa457
--- /dev/null
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CoprocessorServiceBuilderImpl.html
@@ -0,0 +1,693 @@
+http://www.w3.org/TR/html4/loose.dtd";>
+
+
+Source code
+
+
+
+
+001/**
+002 * Licensed to the Apache Software 
Foundation (ASF) under one
+003 * or more contributor license 
agreements.  See the NOTICE file
+004 * distributed with this work for 
additional information
+005 * regarding copyright ownership.  The 
ASF licenses this file
+006 * to you under the Apache License, 
Version 2.0 (the
+007 * "License"); you may not use this file 
except in compliance
+008 * with the License.  You may obtain a 
copy of the License at
+009 *
+010 * 
http://www.apache.org/licenses/LICENSE-2.0
+011 *
+012 * Unless required by applicable law or 
agreed to in writing, software
+013 * distributed under the License is 
distributed on an "AS IS" BASIS,
+014 * WITHOUT WARRANTIES OR CONDITIONS OF 
ANY KIND, either express or implied.
+015 * See the License for the specific 
language governing permissions and
+016 * limitations under the License.
+017 */
+018package org.apache.hadoop.hbase.client;
+019
+020import static 
java.util.stream.Collectors.toList;
+021import static 
org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
+022import static 
org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+023
+024import com.google.protobuf.RpcChannel;
+025
+026import java.io.IOException;
+027import java.util.ArrayList;
+028import java.util.Arrays;
+029import java.util.List;
+030import 
java.util.concurrent.CompletableFuture;
+031import java.util.concurrent.TimeUnit;
+032import 
java.util.concurrent.atomic.AtomicBoolean;
+033import 
java.util.concurrent.atomic.AtomicInteger;
+034import java.util.function.Function;
+035
+036import 
org.apache.hadoop.conf.Configuration;
+037import 
org.apache.hadoop.hbase.CompareOperator;
+038import 
org.apache.hadoop.hbase.HConstants;
+039import 
org.apache.hadoop.hbase.HRegionLocation;
+040import 
org.apache.hadoop.hbase.TableName;
+041import 
org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
+042import 
org.apache.hadoop.hbase.filter.BinaryComparator;
+043import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
+044import 
org.apache.hadoop.hbase.util.Bytes;
+045import 
org.apache.hadoop.hbase.util.ReflectionUtils;
+046import 
org.apache.yetus.audience.InterfaceAudience;
+047
+048import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+049import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
+050import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+051import 
org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+052import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
+053import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
+054import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest;
+055import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse;
+056import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
+057import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse;
+058import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
+059import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
+060import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
+061import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
+062
+063/**
+064 * The implementation of RawAsyncTable.
+065 */
+066@InterfaceAudience.Private
+067class RawAsyncTableImpl implements 
RawAsyncTable {
+068
+069  private final AsyncConnectionImpl 
conn;
+070
+071  private final TableName tableName;
+072
+073  private final int 
defaultScannerCaching;
+074
+075  private final long 
defaultScannerMaxResultSize;
+076
+077  private final long rpcTimeoutNs;
+078
+079  private final long readRpcTimeoutNs;
+080
+081  private final long writeRpcTimeoutNs;
+082
+083  private final long 
operationTimeoutNs;
+084
+085  private final long scanTimeoutNs;
+086
+087  private final long pauseNs;
+088
+089  private final int maxAttempts;
+090
+091  private final int startLogErrorsCnt;
+092
+093  RawAsyncTableImpl(AsyncConnectionImpl 
conn, AsyncTableBuilderBase builder) {
+094this.conn = conn;
+095this.tableName = builder.tableName;
+096 

[20/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.html
index 22eba3b..ebf2220 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.html
@@ -46,214 +46,229 @@
 038 * 

039 * So, only experts that want to build high performance service should use this interface directly, 040 * especially for the {@link #scan(Scan, RawScanResultConsumer)} below. -041 *

-042 * TODO: For now the only difference between this interface and {@link AsyncTable} is the scan -043 * method. The {@link RawScanResultConsumer} exposes the implementation details of a scan(heartbeat) -044 * so it is not suitable for a normal user. If it is still the only difference after we implement -045 * most features of AsyncTable, we can think about merge these two interfaces. -046 * @since 2.0.0 -047 */ -048@InterfaceAudience.Public -049public interface RawAsyncTable extends AsyncTableBase { -050 -051 /** -052 * The basic scan API uses the observer pattern. All results that match the given scan object will -053 * be passed to the given {@code consumer} by calling {@code RawScanResultConsumer.onNext}. -054 * {@code RawScanResultConsumer.onComplete} means the scan is finished, and -055 * {@code RawScanResultConsumer.onError} means we hit an unrecoverable error and the scan is -056 * terminated. {@code RawScanResultConsumer.onHeartbeat} means the RS is still working but we can -057 * not get a valid result to call {@code RawScanResultConsumer.onNext}. This is usually because -058 * the matched results are too sparse, for example, a filter which almost filters out everything -059 * is specified. -060 *

-061 * Notice that, the methods of the given {@code consumer} will be called directly in the rpc -062 * framework's callback thread, so typically you should not do any time consuming work inside -063 * these methods, otherwise you will be likely to block at least one connection to RS(even more if -064 * the rpc framework uses NIO). -065 * @param scan A configured {@link Scan} object. -066 * @param consumer the consumer used to receive results. -067 */ -068 void scan(Scan scan, RawScanResultConsumer consumer); -069 -070 /** -071 * Delegate to a protobuf rpc call. -072 *

-073 * Usually, it is just a simple lambda expression, like: -074 * -075 *

-076   * 
-077   * (stub, controller, rpcCallback) 
-> {
-078   *   XXXRequest request = ...; // 
prepare the request
-079   *   stub.xxx(controller, request, 
rpcCallback);
-080   * }
-081   * 
-082   * 
-083 * -084 * And if you can prepare the {@code request} before calling the coprocessorService method, the -085 * lambda expression will be: -086 * -087 *
-088   * 
-089   * (stub, controller, rpcCallback) 
-> stub.xxx(controller, request, rpcCallback)
-090   * 
-091   * 
-092 */ -093 @InterfaceAudience.Public -094 @FunctionalInterface -095 interface CoprocessorCallable { -096 -097/** -098 * Represent the actual protobuf rpc call. -099 * @param stub the asynchronous stub -100 * @param controller the rpc controller, has already been prepared for you -101 * @param rpcCallback the rpc callback, has already been prepared for you -102 */ -103void call(S stub, RpcController controller, RpcCallback rpcCallback); -104 } -105 -106 /** -107 * Execute the given coprocessor call on the region which contains the given {@code row}. -108 *

-109 * The {@code stubMaker} is just a delegation to the {@code newStub} call. Usually it is only a -110 * one line lambda expression, like: -111 * -112 *

-113   * 
-114   * channel -> 
xxxService.newStub(channel)
-115   * 
-116   * 
-117 * -118 * @param stubMaker a delegation to the actual {@code newStub} call. -119 * @param callable a delegation to the actual protobuf rpc call. See the comment of -120 * {@link CoprocessorCallable} for more details. -121 * @param row The row key used to identify the remote region location -122 * @param the type of the asynchronous stub -123 * @param the type of the return value -124 * @return the return value of the protobuf rpc call, wrapped by a {@link CompletableFuture}. -125 * @see CoprocessorCallable -126 */ -127 CompletableFuture coprocessorService(Function stubMaker, -128 CoprocessorCallable callable, byte[] row); -129 -130 /** -131 * The callback when we want to execute a coprocessor call on a range of regions. -132 *

-133 * As


[29/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.IOErrorWithCause.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.IOErrorWithCause.html
 
b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.IOErrorWithCause.html
index ecda737..9aa8a6b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.IOErrorWithCause.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.IOErrorWithCause.html
@@ -137,7 +137,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class ThriftServerRunner.IOErrorWithCause
+public static class ThriftServerRunner.IOErrorWithCause
 extends org.apache.hadoop.hbase.thrift.generated.IOError
 
 See Also:
@@ -273,7 +273,7 @@ extends 
org.apache.hadoop.hbase.thrift.generated.IOError
 
 
 cause
-private java.lang.Throwable cause
+private java.lang.Throwable cause
 
 
 
@@ -290,7 +290,7 @@ extends 
org.apache.hadoop.hbase.thrift.generated.IOError
 
 
 IOErrorWithCause
-public IOErrorWithCause(java.lang.Throwable cause)
+public IOErrorWithCause(java.lang.Throwable cause)
 
 
 
@@ -307,7 +307,7 @@ extends 
org.apache.hadoop.hbase.thrift.generated.IOError
 
 
 getCause
-public java.lang.Throwable getCause()
+public java.lang.Throwable getCause()
 
 Overrides:
getCause in class java.lang.Throwable
@@ -320,7 +320,7 @@ extends 
org.apache.hadoop.hbase.thrift.generated.IOError
 
 
 equals
-public boolean equals(java.lang.Object other)
+public boolean equals(java.lang.Object other)
 
 Overrides:
 equals in 
class org.apache.hadoop.hbase.thrift.generated.IOError
@@ -333,7 +333,7 @@ extends 
org.apache.hadoop.hbase.thrift.generated.IOError
 
 
 hashCode
-public int hashCode()
+public int hashCode()
 
 Overrides:
 hashCode in 
class org.apache.hadoop.hbase.thrift.generated.IOError

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html 
b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html
index 0cf76c1..5e08f1c 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift/ThriftServerRunner.ImplType.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static enum ThriftServerRunner.ImplType
+static enum ThriftServerRunner.ImplType
extends java.lang.Enum
 An enum of server implementation selections
 
@@ -281,7 +281,7 @@ the order they are declared.
 
 
 HS_HA
-public static final ThriftServerRunner.ImplType 
HS_HA
+public static final ThriftServerRunner.ImplType 
HS_HA
 
 
 
@@ -290,7 +290,7 @@ the order they are declared.
 
 
 NONBLOCKING
-public static final ThriftServerRunner.ImplType 
NONBLOCKING
+public static final ThriftServerRunner.ImplType 
NONBLOCKING
 
 
 
@@ -299,7 +299,7 @@ the order they are declared.
 
 
 THREAD_POOL
-public static final ThriftServerRunner.ImplType 
THREAD_POOL
+public static final ThriftServerRunner.ImplType 
THREAD_POOL
 
 
 
@@ -308,7 +308,7 @@ the order they are declared.
 
 
 THREADED_SELECTOR
-public static final ThriftServerRunner.ImplType 
THREADED_SELECTOR
+public static final ThriftServerRunner.ImplType 
THREADED_SELECTOR
 
 
 
@@ -325,7 +325,7 @@ the order they are declared.
 
 
 DEFAULT
-public static final ThriftServerRunner.ImplType 
DEFAULT
+public static final ThriftServerRunner.ImplT
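[Editor's note: the enum is normally chosen via configuration or the Thrift gateway's CLI flags (-nonblocking, -hsha, -threadedselector). The property key below is the conventional one mapped by ImplType; verify it against your ThriftServerRunner before relying on it.]

  Configuration conf = HBaseConfiguration.create();
  // Assumed key: selects THREADED_SELECTOR instead of the default impl.
  conf.set("hbase.regionserver.thrift.server.type", "threadedselector");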

[48/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
--
diff --git 
a/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
 
b/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
index 22eba3b..ebf2220 100644
--- 
a/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
+++ 
b/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
@@ -46,214 +46,229 @@
 038 * 

039 * So, only experts that want to build high performance service should use this interface directly, 040 * especially for the {@link #scan(Scan, RawScanResultConsumer)} below. -041 *

-042 * TODO: For now the only difference between this interface and {@link AsyncTable} is the scan -043 * method. The {@link RawScanResultConsumer} exposes the implementation details of a scan(heartbeat) -044 * so it is not suitable for a normal user. If it is still the only difference after we implement -045 * most features of AsyncTable, we can think about merge these two interfaces. -046 * @since 2.0.0 -047 */ -048@InterfaceAudience.Public -049public interface RawAsyncTable extends AsyncTableBase { -050 -051 /** -052 * The basic scan API uses the observer pattern. All results that match the given scan object will -053 * be passed to the given {@code consumer} by calling {@code RawScanResultConsumer.onNext}. -054 * {@code RawScanResultConsumer.onComplete} means the scan is finished, and -055 * {@code RawScanResultConsumer.onError} means we hit an unrecoverable error and the scan is -056 * terminated. {@code RawScanResultConsumer.onHeartbeat} means the RS is still working but we can -057 * not get a valid result to call {@code RawScanResultConsumer.onNext}. This is usually because -058 * the matched results are too sparse, for example, a filter which almost filters out everything -059 * is specified. -060 *

-061 * Notice that, the methods of the given {@code consumer} will be called directly in the rpc -062 * framework's callback thread, so typically you should not do any time consuming work inside -063 * these methods, otherwise you will be likely to block at least one connection to RS(even more if -064 * the rpc framework uses NIO). -065 * @param scan A configured {@link Scan} object. -066 * @param consumer the consumer used to receive results. -067 */ -068 void scan(Scan scan, RawScanResultConsumer consumer); -069 -070 /** -071 * Delegate to a protobuf rpc call. -072 *

-073 * Usually, it is just a simple lambda expression, like: -074 * -075 *

-076   * 
-077   * (stub, controller, rpcCallback) -> {
-078   *   XXXRequest request = ...; // prepare the request
-079   *   stub.xxx(controller, request, rpcCallback);
-080   * }
-081   * 
-082   * 
-083 * -084 * And if you can prepare the {@code request} before calling the coprocessorService method, the -085 * lambda expression will be: -086 * -087 *
-088   * 
-089   * (stub, controller, rpcCallback) -> stub.xxx(controller, request, rpcCallback)
-090   * 
-091   * 
-092 */ -093 @InterfaceAudience.Public -094 @FunctionalInterface -095 interface CoprocessorCallable { -096 -097/** -098 * Represent the actual protobuf rpc call. -099 * @param stub the asynchronous stub -100 * @param controller the rpc controller, has already been prepared for you -101 * @param rpcCallback the rpc callback, has already been prepared for you -102 */ -103void call(S stub, RpcController controller, RpcCallback rpcCallback); -104 } -105 -106 /** -107 * Execute the given coprocessor call on the region which contains the given {@code row}. -108 *

-109 * The {@code stubMaker} is just a delegation to the {@code newStub} call. Usually it is only a -110 * one line lambda expression, like: -111 * -112 *

-113   * 
-114   * channel -> xxxService.newStub(channel)
-115   * 
-116   * 
-117 * -118 * @param stubMaker a delegation to the actual {@code newStub} call. -119 * @param callable a delegation to the actual protobuf rpc call. See the comment of -120 * {@link CoprocessorCallable} for more details. -121 * @param row The row key used to identify the remote region location -122 * @param the type of the asynchronous stub -123 * @param the type of the return value -124 * @return the return value of the protobuf rpc call, wrapped by a {@link CompletableFuture}. -125 * @see CoprocessorCallable -126 */ -127 CompletableFuture coprocessorService(Function stubMaker, -128 CoprocessorCallable callable, byte[] row); -129 -130 /** -131 * The callback when
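[Editor's note: combining the pieces for the multi-region form described above — the same stubMaker and callable plus a CoprocessorCallback collecting per-region results. A hedged sketch: coprocessor types come from hbase-examples, and the exact entry point (direct overload vs. a CoprocessorServiceBuilder with fromRow/toRow/execute) varies across these alpha snapshots.]

  table.coprocessorService(
      channel -> ExampleProtos.RowCountService.newStub(channel),
      (stub, controller, rpcCallback) ->
          stub.getRowCount(controller,
              ExampleProtos.CountRequest.getDefaultInstance(), rpcCallback),
      new RawAsyncTable.CoprocessorCallback<ExampleProtos.CountResponse>() {
        private long total;
        @Override public void onRegionComplete(RegionInfo region,
            ExampleProtos.CountResponse resp) { total += resp.getCount(); }
        @Override public void onRegionError(RegionInfo region, Throwable e) { }
        @Override public void onComplete() { System.out.println("total=" + total); }
        @Override public void onError(Throwable e) { e.printStackTrace(); }
      });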

[08/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.Builder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.Builder.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.Builder.html
index 9b5e564..5f34a86 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.Builder.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.Builder.html
@@ -25,1367 +25,1374 @@
 017 */
 018package org.apache.hadoop.hbase.http;
 019
-020import java.io.FileNotFoundException;
-021import java.io.IOException;
-022import java.io.InterruptedIOException;
-023import java.io.PrintStream;
-024import java.net.BindException;
-025import java.net.InetSocketAddress;
-026import java.net.URI;
-027import java.net.URISyntaxException;
-028import java.net.URL;
-029import java.util.ArrayList;
-030import java.util.Collections;
-031import java.util.Enumeration;
-032import java.util.HashMap;
-033import java.util.List;
-034import java.util.Map;
-035
-036import javax.servlet.Filter;
-037import javax.servlet.FilterChain;
-038import javax.servlet.FilterConfig;
-039import javax.servlet.ServletContext;
-040import javax.servlet.ServletException;
-041import javax.servlet.ServletRequest;
-042import javax.servlet.ServletResponse;
-043import javax.servlet.http.HttpServlet;
-044import javax.servlet.http.HttpServletRequest;
-045import javax.servlet.http.HttpServletRequestWrapper;
-046import javax.servlet.http.HttpServletResponse;
-047
-048import org.apache.commons.logging.Log;
-049import org.apache.commons.logging.LogFactory;
-050import org.apache.hadoop.HadoopIllegalArgumentException;
-051import org.apache.yetus.audience.InterfaceAudience;
-052import org.apache.yetus.audience.InterfaceStability;
-053import org.apache.hadoop.conf.Configuration;
-054import org.apache.hadoop.fs.CommonConfigurationKeys;
-055import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-056import org.apache.hadoop.hbase.http.conf.ConfServlet;
-057import org.apache.hadoop.hbase.http.jmx.JMXJsonServlet;
-058import org.apache.hadoop.hbase.http.log.LogLevel;
-059import org.apache.hadoop.hbase.util.Threads;
-060import org.apache.hadoop.hbase.util.ReflectionUtils;
-061import org.apache.hadoop.security.SecurityUtil;
-062import org.apache.hadoop.security.UserGroupInformation;
-063import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
-064import org.apache.hadoop.security.authorize.AccessControlList;
-065import org.apache.hadoop.util.Shell;
-066
-067import org.eclipse.jetty.http.HttpVersion;
-068import org.eclipse.jetty.server.Server;
-069import org.eclipse.jetty.server.Handler;
-070import org.eclipse.jetty.server.HttpConfiguration;
-071import org.eclipse.jetty.server.HttpConnectionFactory;
-072import org.eclipse.jetty.server.ServerConnector;
-073import org.eclipse.jetty.server.SecureRequestCustomizer;
-074import org.eclipse.jetty.server.SslConnectionFactory;
-075import org.eclipse.jetty.server.handler.ContextHandlerCollection;
-076import org.eclipse.jetty.server.handler.HandlerCollection;
-077import org.eclipse.jetty.server.RequestLog;
-078import org.eclipse.jetty.server.handler.RequestLogHandler;
-079import org.eclipse.jetty.servlet.FilterMapping;
-080import org.eclipse.jetty.servlet.ServletHandler;
-081import org.eclipse.jetty.servlet.FilterHolder;
-082import org.eclipse.jetty.servlet.ServletContextHandler;
-083import org.eclipse.jetty.servlet.DefaultServlet;
-084import org.eclipse.jetty.servlet.ServletHolder;
-085import org.eclipse.jetty.util.MultiException;
-086import org.eclipse.jetty.util.ssl.SslContextFactory;
-087import org.eclipse.jetty.util.thread.QueuedThreadPool;
-088import org.eclipse.jetty.webapp.WebAppContext;
-089
-090import org.glassfish.jersey.server.ResourceConfig;
-091import org.glassfish.jersey.servlet.ServletContainer;
-092
-093import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-094import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-095
-096/**
-097 * Create a Jetty embedded server to answer http requests. The primary goal
-098 * is to serve up status information for the server.
-099 * There are three contexts:
-100 *   "/logs/" -> points to the log directory
-101 *   "/static/" -> points to common static files (src/webapps/static)
-102 *   "/" -> the jsp server code from (src/webapps/<name>)
-103 */
-104@InterfaceAudience.Private
-105@InterfaceStability.Evolving
-106public class HttpServer implements FilterContainer {
-107  private static final Log LOG = LogFactory.getLog(HttpServer.class);
-108  private static final String EMPTY_STRING = "";
-109
-110  private static final int DEFAULT_MAX_HEADER_SIZE = 64 * 1024; // 64K
-111
-112  static final String FILTER_INITIALIZERS_PROPERTY
-113      = "hbase.http.filter.initializers"

[37/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html b/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html
index 581e91f..fad342a 100644
--- a/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/http/HttpServer.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":9,"i13":9,"i14":10,"i15":10,"i16":9,"i17":10,"i18":42,"i19":10,"i20":10,"i21":10,"i22":9,"i23":10,"i24":10,"i25":10,"i26":9,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":9};
+var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":9,"i11":10,"i12":9,"i13":9,"i14":10,"i15":10,"i16":9,"i17":10,"i18":42,"i19":10,"i20":10,"i21":10,"i22":10,"i23":9,"i24":10,"i25":10,"i26":10,"i27":9,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":9};
 var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"],32:["t6","Deprecated Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -115,7 +115,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.Private
  @InterfaceStability.Evolving
-public class HttpServer
+public class HttpServer
 extends java.lang.Object
 implements FilterContainer
 Create a Jetty embedded server to answer http requests. The primary goal
@@ -534,21 +534,25 @@ implements 
+List<ServerConnector>
+getServerConnectors()
+
 org.eclipse.jetty.webapp.WebAppContext
 getWebAppContext()
 String
 getWebAppsPath(String appName)
 protected String
 getWebAppsPath(String webapps, String appName)
   Get the pathname to the webapps files.
 static boolean
 hasAdministratorAccess(javax.servlet.ServletContext servletContext,
   javax.servlet.http.HttpServletRequest request,
@@ -557,14 +561,14 @@ implements 
 private void
 initializeWebServer(String name, String hostName, org.apache.hadoop.conf.Configuration conf, String[] pathSpecs)
 private void
 initSpnego(org.apache.hadoop.conf.Configuration conf, String hostName,
@@ -573,13 +577,13 @@ implements String kerberosNameRuleKey, String signatureSecretKeyFileKey)
 boolean
 isAlive()
   Test for the availability of the web server
 static boolean
 isInstrumentationAccessAllowed(javax.servlet.ServletContext servletContext, javax.servlet.http.HttpServletRequest request,
@@ -587,64 +591,64 @@ implements Checks the user has privileges to access to instrumentation servlets.
 private boolean
 isMissing(String value)
   Returns true if the argument is non-null and not whitespace
 void
 join()
 private void
 loadListeners()
 (package private) void
 openListeners()
   Open the main listener for the ser
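The new getServerConnectors() accessor returns the Jetty connectors backing the server. A hedged sketch of one plausible use (assumes the server was started, possibly on an ephemeral port; getLocalPort is standard Jetty ServerConnector API):

    import org.eclipse.jetty.server.ServerConnector;

    // After httpServer.start(), ask Jetty which port was actually bound
    // (useful when port 0 was requested in the builder).
    ServerConnector connector = httpServer.getServerConnectors().get(0);
    int boundPort = connector.getLocalPort();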

[28/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftServer.html b/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftServer.html
index 04db6db..ff49147 100644
--- a/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/thrift2/ThriftServer.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.LimitedPrivate(value="Tools")
-public class ThriftServer
+public class ThriftServer
 extends org.apache.hadoop.conf.Configured
 implements org.apache.hadoop.util.Tool
 ThriftServer - this class starts up a Thrift server which implements the HBase API specified in the
@@ -341,7 +341,7 @@ implements org.apache.hadoop.util.Tool
 log
-private static final org.apache.commons.logging.Log log
+private static final org.apache.commons.logging.Log log
@@ -350,7 +350,7 @@ implements org.apache.hadoop.util.Tool
 THRIFT_QOP_KEY
-static final String THRIFT_QOP_KEY
+static final String THRIFT_QOP_KEY
 Thrift quality of protection configuration key. Valid values can be:
  privacy: authentication, integrity and confidentiality checking
  integrity: authentication and integrity checking
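A hedged configuration sketch, not part of this diff: the literal key below is assumed to be the value held by ThriftServer.THRIFT_QOP_KEY.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    Configuration conf = HBaseConfiguration.create();
    // "privacy"   = authentication + integrity + confidentiality
    // "integrity" = authentication + integrity
    conf.set("hbase.thrift.security.qop", "privacy"); // assumed value of THRIFT_QOP_KEY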
@@ -370,7 +370,7 @@ implements org.apache.hadoop.util.Tool
 BACKLOG_CONF_KEY
-static final String BACKLOG_CONF_KEY
+static final String BACKLOG_CONF_KEY
 See Also: Constant Field Values
@@ -383,7 +383,7 @@ implements org.apache.hadoop.util.Tool
 DEFAULT_LISTEN_PORT
-public static final int DEFAULT_LISTEN_PORT
+public static final int DEFAULT_LISTEN_PORT
 See Also: Constant Field Values
@@ -396,7 +396,7 @@ implements org.apache.hadoop.util.Tool
 READ_TIMEOUT_OPTION
-private static final String READ_TIMEOUT_OPTION
+private static final String READ_TIMEOUT_OPTION
 See Also: Constant Field Values
@@ -409,7 +409,7 @@ implements org.apache.hadoop.util.Tool
 THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY
-public static final String THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY
+public static final String THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY
 Amount of time in milliseconds before a server thread will timeout
 waiting for client to send data on a connected socket. Currently,
 applies only to TBoundedThreadPoolServer
@@ -425,7 +425,7 @@ implements org.apache.hadoop.util.Tool
 THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT
-public static final int THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT
+public static final int THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT
 See Also: Constant Field Values
@@ -446,7 +446,7 @@ implements org.apache.hadoop.util.Tool
 ThriftServer
-public ThriftServer()
+public ThriftServer()
@@ -463,7 +463,7 @@ implements org.apache.hadoop.util.Tool
 printUsage
-private static void printUsage()
+private static void printUsage()
@@ -472,7 +472,7 @@ implements org.apache.hadoop.util.Tool
 getOptions
-private static org.apache.commons.cli.Options getOptions()
+private static org.apache.commons.cli.Options getOptions()
@@ -481,7 +481,7 @@ implements org.apache.hadoop.util.Tool
 parseArguments
-private static org.apache.commons.cli.CommandLine parseArguments(org.apache.hadoop.conf.Configuration conf,
+private static org.apache.commons.cli.CommandLine parseArguments(org.apache.hadoop.conf.Configuration conf,
   org.apache.commons.cli.Options options,
   String[] args)
   throws org.apache.commons.cli.ParseException,
@@ -499,7 +499,7 @@ implements org.apache.hadoop.util.Tool
 getTProtocolFactory
-private static org.apache.thrift.protocol.TPr

[03/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.html b/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.html
index 9b5e564..5f34a86 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.html
@@ -25,1367 +25,1374 @@

[31/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.html b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.html
index 423201e..9637583 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/AsyncProtobufLogWriter.html
@@ -123,7 +123,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class AsyncProtobufLogWriter
+public class AsyncProtobufLogWriter
 extends AbstractProtobufLogWriter
 implements AsyncFSWALProvider.AsyncWriter
 AsyncWriter for protobuf-based WAL.
@@ -318,7 +318,7 @@ implements 
 LOG
-private static final org.apache.commons.logging.Log LOG
+private static final org.apache.commons.logging.Log LOG
@@ -327,7 +327,7 @@ implements 
 eventLoop
-private final org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop eventLoop
+private final org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop eventLoop
@@ -336,7 +336,7 @@ implements 
 channelClass
-private final Class channelClass
+private final Class channelClass
@@ -345,7 +345,7 @@ implements 
 output
-private AsyncFSOutput output
+private AsyncFSOutput output
@@ -354,7 +354,7 @@ implements 
 asyncOutputWrapper
-private OutputStream asyncOutputWrapper
+private OutputStream asyncOutputWrapper
@@ -371,7 +371,7 @@ implements 
 AsyncProtobufLogWriter
-public AsyncProtobufLogWriter(org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop eventLoop,
+public AsyncProtobufLogWriter(org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoop eventLoop,
   Class channelClass)
@@ -389,7 +389,7 @@ implements 
 append
-public void append(WAL.Entry entry)
+public void append(WAL.Entry entry)
 Specified by: append in interface WALProvider.AsyncWriter
@@ -402,7 +402,7 @@ implements 
 sync
-public CompletableFuture<Long> sync()
+public CompletableFuture<Long> sync()
 Specified by: sync in interface WALProvider.AsyncWriter
@@ -415,7 +415,7 @@ implements 
 close
-public void close()
+public void close()
   throws IOException
 Specified by:
@@ -433,7 +433,7 @@ implements 
 getOutput
-public AsyncFSOutput getOutput()
+public AsyncFSOutput getOutput()
@@ -442,18 +442,20 @@ implements 
 initOutput
-protected void initOutput(org.apache.hadoop.fs.FileSystem fs,
+protected void initOutput(org.apache.hadoop.fs.FileSystem fs,
   org.apache.hadoop.fs.Path path,
   boolean overwritable,
   int bufferSize,
   short replication,
   long blockSize)
-  throws IOException
+  throws IOException,
+    CommonFSUtils.StreamLacksCapabilityException
 Specified by: initOutput in class AbstractProtobufLogWriter
 Throws: IOException
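A hedged sketch of the call sequence the page above documents; the writer and entry values are assumed to exist elsewhere, and error handling is elided:

    writer.append(entry);                  // queue a WAL.Entry
    writer.sync().thenAccept(length ->     // CompletableFuture<Long>: flushed WAL length
        LOG.debug("WAL synced at length " + length));
    // later, on shutdown:
    writer.close();                        // may throw IOException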

[47/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.html b/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.html
index 22eba3b..ebf2220 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.html
@@ -46,214 +46,229 @@
 038 * <p>
 039 * So, only experts that want to build high performance service should use this interface directly,
 040 * especially for the {@link #scan(Scan, RawScanResultConsumer)} below.
-041 * <p>
-042 * TODO: For now the only difference between this interface and {@link AsyncTable} is the scan
-043 * method. The {@link RawScanResultConsumer} exposes the implementation details of a scan(heartbeat)
-044 * so it is not suitable for a normal user. If it is still the only difference after we implement
-045 * most features of AsyncTable, we can think about merge these two interfaces.
-046 * @since 2.0.0
-047 */
-048@InterfaceAudience.Public
-049public interface RawAsyncTable extends AsyncTableBase {
-050
-051  /**
-052   * The basic scan API uses the observer pattern. All results that match the given scan object will
-053   * be passed to the given {@code consumer} by calling {@code RawScanResultConsumer.onNext}.
-054   * {@code RawScanResultConsumer.onComplete} means the scan is finished, and
-055   * {@code RawScanResultConsumer.onError} means we hit an unrecoverable error and the scan is
-056   * terminated. {@code RawScanResultConsumer.onHeartbeat} means the RS is still working but we can
-057   * not get a valid result to call {@code RawScanResultConsumer.onNext}. This is usually because
-058   * the matched results are too sparse, for example, a filter which almost filters out everything
-059   * is specified.
-060   * <p>
-061   * Notice that, the methods of the given {@code consumer} will be called directly in the rpc
-062   * framework's callback thread, so typically you should not do any time consuming work inside
-063   * these methods, otherwise you will be likely to block at least one connection to RS(even more if
-064   * the rpc framework uses NIO).
-065   * @param scan A configured {@link Scan} object.
-066   * @param consumer the consumer used to receive results.
-067   */
-068  void scan(Scan scan, RawScanResultConsumer consumer);
-069
-070  /**
-071   * Delegate to a protobuf rpc call.
-072   * <p>
-073   * Usually, it is just a simple lambda expression, like:
-074   *
-075   * <pre>
-076   * <code>
-077   * (stub, controller, rpcCallback) -> {
-078   *   XXXRequest request = ...; // prepare the request
-079   *   stub.xxx(controller, request, rpcCallback);
-080   * }
-081   * </code>
-082   * </pre>
-083   *
-084   * And if you can prepare the {@code request} before calling the coprocessorService method, the
-085   * lambda expression will be:
-086   *
-087   * <pre>
-088   * <code>
-089   * (stub, controller, rpcCallback) -> stub.xxx(controller, request, rpcCallback)
-090   * </code>
-091   * </pre>
-092   */
-093  @InterfaceAudience.Public
-094  @FunctionalInterface
-095  interface CoprocessorCallable<S, R> {
-096
-097    /**
-098     * Represent the actual protobuf rpc call.
-099     * @param stub the asynchronous stub
-100     * @param controller the rpc controller, has already been prepared for you
-101     * @param rpcCallback the rpc callback, has already been prepared for you
-102     */
-103    void call(S stub, RpcController controller, RpcCallback<R> rpcCallback);
-104  }
-105
-106  /**
-107   * Execute the given coprocessor call on the region which contains the given {@code row}.
-108   * <p>
-109   * The {@code stubMaker} is just a delegation to the {@code newStub} call. Usually it is only a
-110   * one line lambda expression, like:
-111   *
-112   * <pre>
-113   * <code>
-114   * channel -> xxxService.newStub(channel)
-115   * </code>
-116   * </pre>
-117   *
-118   * @param stubMaker a delegation to the actual {@code newStub} call.
-119   * @param callable a delegation to the actual protobuf rpc call. See the comment of
-120   *          {@link CoprocessorCallable} for more details.
-121   * @param row The row key used to identify the remote region location
-122   * @param <S> the type of the asynchronous stub
-123   * @param <R> the type of the return value
-124   * @return the return value of the protobuf rpc call, wrapped by a {@link CompletableFuture}.
-125   * @see CoprocessorCallable
-126   */
-127  <S, R> CompletableFuture<R> coprocessorService(Function<RpcChannel, S> stubMaker,
-128      CoprocessorCallable<S, R> callable, byte[] row);
-129
-130  /**
-131   * The callback when we want to execute a coprocessor call on a range of regions.
-132   * <p>
-133   * As the locating its
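A hedged sketch of the observer-pattern scan described in the removed javadoc. The parameter lists follow the 2.0-era RawScanResultConsumer and may differ slightly across versions; the table variable and column family name are assumptions:

    table.scan(new Scan().addFamily(Bytes.toBytes("cf")), new RawScanResultConsumer() {
      @Override
      public void onNext(Result[] results, ScanController controller) {
        // runs on the rpc callback thread: hand results off quickly, do not block here
      }
      @Override
      public void onError(Throwable error) {
        // unrecoverable error, the scan is terminated
      }
      @Override
      public void onComplete() {
        // scan finished
      }
    });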


[21/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
index 22eba3b..ebf2220 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallback.html
@@ -46,214 +46,229 @@

[50/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 727cc8f..8214ace 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20171101144728+00'00')
-/CreationDate (D:20171101144728+00'00')
+/ModDate (D:20171103144740+00'00')
+/CreationDate (D:20171103144740+00'00')
 >>
 endobj
 2 0 obj
@@ -27334,7 +27334,7 @@ endobj
 endobj
 136 0 obj
 << /Limits [(__anchor-top) (adding.new.node)]
-/Names [(__anchor-top) 25 0 R (__indexterm-6979032) 3272 0 R (__indexterm-6981282) 3274 0 R (__indexterm-6983344) 3275 0 R (__indexterm-6985218) 3276 0 R (acid) 897 0 R (add-metric-name-and-function-to-hadoop-compat-interface) 3372 0 R (add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3373 0 R (add.metrics) 3370 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 3613 0 R (adding.new.node) 2863 0 R]
+/Names [(__anchor-top) 25 0 R (__indexterm-6979034) 3272 0 R (__indexterm-6981284) 3274 0 R (__indexterm-6983346) 3275 0 R (__indexterm-6985220) 3276 0 R (acid) 897 0 R (add-metric-name-and-function-to-hadoop-compat-interface) 3372 0 R (add-the-implementation-to-both-hadoop-1-and-hadoop-2-compat-modules) 3373 0 R (add.metrics) 3370 0 R (adding-a-new-chapter-to-the-hbase-reference-guide) 3613 0 R (adding.new.node) 2863 0 R]
 >>
 endobj
 137 0 obj
@@ -657193,7 +657193,7 @@ endobj
 >>
 endobj
 3454 0 obj
-<< /Length 7585
+<< /Length 7587
 >>
 stream
 q
@@ -657555,7 +657555,7 @@ ET
 BT
 147.24 538.625 Td
 /F4.0 11 Tf
-<6d6f636b69746f2d616c6c> Tj
+<6d6f636b69746f2d636f7265> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -657564,7 +657564,7 @@ ET
 0.1843 0.4353 0.6235 SCN
 
 BT
-207.74 538.625 Td
+213.24 538.625 Td
 /F4.0 11 Tf
 <3c2f617274696661637449643e> Tj
 ET
@@ -657599,7 +657599,7 @@ ET
 BT
 130.74 523.885 Td
 /F4.0 11 Tf
-<312e392e35> Tj
+<322e312e30> Tj
 ET
 
 0.0 0.0 0.0 SCN
 (The hex text runs in this hunk and the one above decode to "mockito-all" becoming "mockito-core" and "1.9.5" becoming "2.1.0": the reference guide's Mockito dependency bump.)
@@ -760706,1142 +760706,1142 @@ xref

[06/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.QuotingInputFilter.RequestQuoter.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.QuotingInputFilter.RequestQuoter.html b/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.QuotingInputFilter.RequestQuoter.html
index 9b5e564..5f34a86 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.QuotingInputFilter.RequestQuoter.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.QuotingInputFilter.RequestQuoter.html
@@ -25,1367 +25,1374 @@

[04/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.StackServlet.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.StackServlet.html b/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.StackServlet.html
index 9b5e564..5f34a86 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.StackServlet.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/http/HttpServer.StackServlet.html
@@ -25,1367 +25,1374 @@

[14/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html
index 0d33cae..19fa457 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTableImpl.html
@@ -26,564 +26,607 @@
 018package org.apache.hadoop.hbase.client;
 019
 020import static java.util.stream.Collectors.toList;
-021import static org.apache.hadoop.hbase.HConstants.EMPTY_END_ROW;
-022import static org.apache.hadoop.hbase.HConstants.EMPTY_START_ROW;
-023import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
-024import static org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+021import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
+022import static org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
+023
+024import com.google.protobuf.RpcChannel;
 025
-026import com.google.protobuf.RpcChannel;
-027
-028import java.io.IOException;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.List;
-032import java.util.Optional;
-033import java.util.concurrent.CompletableFuture;
-034import java.util.concurrent.TimeUnit;
-035import java.util.concurrent.atomic.AtomicBoolean;
-036import java.util.concurrent.atomic.AtomicInteger;
-037import java.util.function.Function;
-038
-039import org.apache.hadoop.conf.Configuration;
-040import org.apache.hadoop.hbase.CompareOperator;
-041import org.apache.hadoop.hbase.HRegionLocation;
-042import org.apache.hadoop.hbase.TableName;
-043import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
-044import org.apache.hadoop.hbase.filter.BinaryComparator;
-045import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-046import org.apache.hadoop.hbase.util.Bytes;
-047import org.apache.hadoop.hbase.util.ReflectionUtils;
-048import org.apache.yetus.audience.InterfaceAudience;
-049
-050import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
-051import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
-052import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-053import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-054import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
-055import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
-056import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest;
-057import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse;
-058import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
-059import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse;
-060import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
-061import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
-062import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
-063import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
-064
-065/**
-066 * The implementation of RawAsyncTable.
-067 */
-068@InterfaceAudience.Private
-069class RawAsyncTableImpl implements RawAsyncTable {
+026import java.io.IOException;
+027import java.util.ArrayList;
+028import java.util.Arrays;
+029import java.util.List;
+030import java.util.concurrent.CompletableFuture;
+031import java.util.concurrent.TimeUnit;
+032import java.util.concurrent.atomic.AtomicBoolean;
+033import java.util.concurrent.atomic.AtomicInteger;
+034import java.util.function.Function;
+035
+036import org.apache.hadoop.conf.Configuration;
+037import org.apache.hadoop.hbase.CompareOperator;
+038import org.apache.hadoop.hbase.HConstants;
+039import org.apache.hadoop.hbase.HRegionLocation;
+040import org.apache.hadoop.hbase.TableName;
+041import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
+042import org.apache.hadoop.hbase.filter.BinaryComparator;
+043import org.apache.hadoop.hbase.ipc.HBaseRpcController;
+044import org.apache.hadoop.hbase.util.Bytes;
+045import org.apache.hadoop.hbase.util.ReflectionUtils;
+046import org.apache.yetus.audience.InterfaceAudience;
+047
+048import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+049import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
+050import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+051import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+052import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
+053import org.apache.hadoop.hbase.shaded.protobuf.

[42/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CoprocessorServiceBuilderImpl.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CoprocessorServiceBuilderImpl.html b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CoprocessorServiceBuilderImpl.html
new file mode 100644
index 000..7b1620c
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/client/RawAsyncTableImpl.CoprocessorServiceBuilderImpl.html
@@ -0,0 +1,459 @@
+RawAsyncTableImpl.CoprocessorServiceBuilderImpl (Apache HBase 3.0.0-SNAPSHOT API)
+
+org.apache.hadoop.hbase.client
+Class RawAsyncTableImpl.CoprocessorServiceBuilderImpl
+
+java.lang.Object
+  org.apache.hadoop.hbase.client.RawAsyncTableImpl.CoprocessorServiceBuilderImpl
+
+All Implemented Interfaces:
+  RawAsyncTable.CoprocessorServiceBuilder
+Enclosing class:
+  RawAsyncTableImpl
+
+private final class RawAsyncTableImpl.CoprocessorServiceBuilderImpl
+extends java.lang.Object
+implements RawAsyncTable.CoprocessorServiceBuilder
+
+Field Summary
+  private RawAsyncTable.CoprocessorCallable  callable
+  private RawAsyncTable.CoprocessorCallback  callback
+  private byte[]  endKey
+  private boolean  endKeyInclusive
+  private byte[]  startKey
+  private boolean  startKeyInclusive
+  private java.util.function.Function  stubMaker
+
+Constructor Summary
+  CoprocessorServiceBuilderImpl(Function stubMaker,
+      RawAsyncTable.CoprocessorCallable callable,
+      RawAsyncTable.CoprocessorCallback callback)
+
+Method Summary (All Methods, Instance Methods, Concrete Methods)
+  void  execute()
+      Execute the coprocessorService request.
+  RawAsyncTableImpl.CoprocessorServiceBuilderImpl  fromRow(byte[] startKey, boolean inclusive)
+  RawAsyncTableImpl.CoprocessorServiceBuilderImpl  toRow(byte[] endKey, boolean inclusive)
+
+Methods inherited from class java.lang.Object:
+  clone, equals, finalize, getClass, hashCode, ...

[49/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/apidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.CoprocessorCallback.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.CoprocessorCallback.html b/apidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.CoprocessorCallback.html
index c0d39b3..1d78d22 100644
--- a/apidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.CoprocessorCallback.html
+++ b/apidocs/org/apache/hadoop/hbase/client/class-use/RawAsyncTable.CoprocessorCallback.html
@@ -105,27 +105,11 @@
 
 
 
-<S,R> void
-RawAsyncTable.coprocessorService(Function<RpcChannel,S> stubMaker,
+<S,R> org.apache.hadoop.hbase.client.RawAsyncTable.CoprocessorServiceBuilder<S,R>
+RawAsyncTable.coprocessorService(Function<RpcChannel,S> stubMaker,
   RawAsyncTable.CoprocessorCallable<S,R> callable,
-  byte[] startKey,
-  boolean startKeyInclusive,
-  byte[] endKey,
-  boolean endKeyInclusive,
   RawAsyncTable.CoprocessorCallback<R> callback)
-Execute the given coprocessor call on the regions which are covered by the range from
- startKey and endKey.
-
-default <S,R> void
-RawAsyncTable.coprocessorService(Function<RpcChannel,S> stubMaker,
-  RawAsyncTable.CoprocessorCallable<S,R> callable,
-  byte[] startKey,
-  byte[] endKey,
-  RawAsyncTable.CoprocessorCallback<R> callback)
-Execute the given coprocessor call on the regions which are covered by the range from
- startKey inclusive and endKey exclusive.
+Execute a coprocessor call on the regions which are covered by a range.
by a range.
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallable.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallable.html b/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallable.html
index 22eba3b..ebf2220 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallable.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/client/RawAsyncTable.CoprocessorCallable.html
@@ -46,214 +46,229 @@
 038 *
 039 * So, only experts that want to build high performance service should use this interface directly,
 040 * especially for the {@link #scan(Scan, RawScanResultConsumer)} below.
-041 *
-042 * TODO: For now the only difference between this interface and {@link AsyncTable} is the scan
-043 * method. The {@link RawScanResultConsumer} exposes the implementation details of a scan(heartbeat)
-044 * so it is not suitable for a normal user. If it is still the only difference after we implement
-045 * most features of AsyncTable, we can think about merge these two interfaces.
-046 * @since 2.0.0
-047 */
-048 @InterfaceAudience.Public
-049 public interface RawAsyncTable extends AsyncTableBase {
-050
-051   /**
-052    * The basic scan API uses the observer pattern. All results that match the given scan object will
-053    * be passed to the given {@code consumer} by calling {@code RawScanResultConsumer.onNext}.
-054    * {@code RawScanResultConsumer.onComplete} means the scan is finished, and
-055    * {@code RawScanResultConsumer.onError} means we hit an unrecoverable error and the scan is
-056    * terminated. {@code RawScanResultConsumer.onHeartbeat} means the RS is still working but we can
-057    * not get a valid result to call {@code RawScanResultConsumer.onNext}. This is usually because
-058    * the matched results are too sparse, for example, a filter which almost filters out everything
-059    * is specified.
-060    *
-061    * Notice that, the methods of the given {@code consumer} will be called directly in the rpc
-062    * framework's callback thread, so typically you should not do any time consuming work inside
-063    * these methods, otherwise you will be likely to block at least one connection to RS(even more if
-064    * the rpc framework uses NIO).
-065    * @param scan A configured {@link Scan} object.
-066    * @param consumer the consumer used to receive results.
-067    */
-068   void scan(Scan scan, RawScanResultConsumer consumer);
-069
-070   /**
-071    * Delegate to a

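The scan javadoc above boils down to: implement four callbacks and keep each one
cheap, because they run on the rpc framework's callback thread. A minimal
sketch, assuming a RawAsyncTable named rawTable is in hand and that the
RawScanResultConsumer callbacks carry the shapes named in that javadoc (the
exact signatures in this snapshot may differ):

    // Counts matching rows; every callback returns quickly, per the warning
    // above about blocking the rpc callback thread.
    rawTable.scan(new Scan().setFilter(myFilter), new RawScanResultConsumer() {
      private long rows;
      @Override public void onNext(Result[] results) { rows += results.length; }
      @Override public void onHeartbeat() { /* RS alive, matches sparse so far */ }
      @Override public void onError(Throwable error) { LOG.error("scan failed", error); }
      @Override public void onComplete() { LOG.info("scan done, rows=" + rows); }
    });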
[11/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderBlocks.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderBlocks.html b/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderBlocks.html
index 1553cc6..1daa9e8 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderBlocks.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/fs/HFileSystem.ReorderBlocks.html
@@ -73,434 +73,431 @@
 065 public class HFileSystem extends FilterFileSystem {
 066   public static final Log LOG = LogFactory.getLog(HFileSystem.class);
 067
-068   /** Parameter name for HBase WAL directory */
-069   public static final String HBASE_WAL_DIR = "hbase.wal.dir";
-070
-071   private final FileSystem noChecksumFs;   // read hfile data from storage
-072   private final boolean useHBaseChecksum;
-073   private static volatile byte unspecifiedStoragePolicyId = Byte.MIN_VALUE;
-074
-075   /**
-076    * Create a FileSystem object for HBase regionservers.
-077    * @param conf The configuration to be used for the filesystem
-078    * @param useHBaseChecksum if true, then use
-079    *        checksum verfication in hbase, otherwise
-080    *        delegate checksum verification to the FileSystem.
-081    */
-082   public HFileSystem(Configuration conf, boolean useHBaseChecksum)
-083     throws IOException {
-084
-085     // Create the default filesystem with checksum verification switched on.
-086     // By default, any operation to this FilterFileSystem occurs on
-087     // the underlying filesystem that has checksums switched on.
-088     this.fs = FileSystem.get(conf);
-089     this.useHBaseChecksum = useHBaseChecksum;
-090
-091     fs.initialize(getDefaultUri(conf), conf);
-092
-093     // disable checksum verification for local fileSystem, see HBASE-11218
-094     if (fs instanceof LocalFileSystem) {
-095       fs.setWriteChecksum(false);
-096       fs.setVerifyChecksum(false);
-097     }
-098
-099     addLocationsOrderInterceptor(conf);
-100
-101     // If hbase checksum verification is switched on, then create a new
-102     // filesystem object that has cksum verification turned off.
-103     // We will avoid verifying checksums in the fs client, instead do it
-104     // inside of hbase.
-105     // If this is the local file system hadoop has a bug where seeks
-106     // do not go to the correct location if setVerifyChecksum(false) is called.
-107     // This manifests itself in that incorrect data is read and HFileBlocks won't be able to read
-108     // their header magic numbers. See HBASE-5885
-109     if (useHBaseChecksum && !(fs instanceof LocalFileSystem)) {
-110       conf = new Configuration(conf);
-111       conf.setBoolean("dfs.client.read.shortcircuit.skip.checksum", true);
-112       this.noChecksumFs = maybeWrapFileSystem(newInstanceFileSystem(conf), conf);
-113       this.noChecksumFs.setVerifyChecksum(false);
-114     } else {
-115       this.noChecksumFs = maybeWrapFileSystem(fs, conf);
-116     }
+068   private final FileSystem noChecksumFs;   // read hfile data from storage
+069   private final boolean useHBaseChecksum;
+070   private static volatile byte unspecifiedStoragePolicyId = Byte.MIN_VALUE;
+071
+072   /**
+073    * Create a FileSystem object for HBase regionservers.
+074    * @param conf The configuration to be used for the filesystem
+075    * @param useHBaseChecksum if true, then use
+076    *        checksum verfication in hbase, otherwise
+077    *        delegate checksum verification to the FileSystem.
+078    */
+079   public HFileSystem(Configuration conf, boolean useHBaseChecksum)
+080     throws IOException {
+081
+082     // Create the default filesystem with checksum verification switched on.
+083     // By default, any operation to this FilterFileSystem occurs on
+084     // the underlying filesystem that has checksums switched on.
+085     this.fs = FileSystem.get(conf);
+086     this.useHBaseChecksum = useHBaseChecksum;
+087
+088     fs.initialize(getDefaultUri(conf), conf);
+089
+090     // disable checksum verification for local fileSystem, see HBASE-11218
+091     if (fs instanceof LocalFileSystem) {
+092       fs.setWriteChecksum(false);
+093       fs.setVerifyChecksum(false);
+094     }
+095
+096     addLocationsOrderInterceptor(conf);
+097
+098     // If hbase checksum verification is switched on, then create a new
+099     // filesystem object that has cksum verification turned off.
+100     // We will avoid verifying checksums in the fs client, instead do it
+101     // inside of hbase.
+102     // If this is the local file system hadoop has a bug where seeks
+103     // do not go to the correct location if setVerifyChecksum(false) is called.
+104     // This manifests itself in that incorrect data is read and HFileBlocks won't be able to read
+105     // their header magic

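The net effect of this hunk is only that HBASE_WAL_DIR moved out of HFileSystem
(it reappears on CommonFSUtils below); the checksum-splitting constructor is
unchanged. A hedged sketch of what that split gives a caller, assuming an
HFileSystem getNoChecksumFs() accessor for the private field above:

    // Two views of the same storage: "fs" verifies checksums in the client,
    // "noChecksumFs" leaves verification to HBase's own hfile checksums.
    Configuration conf = HBaseConfiguration.create();
    HFileSystem hfs = new HFileSystem(conf, true /* useHBaseChecksum */);
    FileSystem noChecksumFs = hfs.getNoChecksumFs(); // hfile data reads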
[27/51] [partial] hbase-site git commit: Published site at .

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/7d38bdbb/devapidocs/org/apache/hadoop/hbase/util/CommonFSUtils.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/util/CommonFSUtils.html b/devapidocs/org/apache/hadoop/hbase/util/CommonFSUtils.html
new file mode 100644
index 000..5e3b8f3
--- /dev/null
+++ b/devapidocs/org/apache/hadoop/hbase/util/CommonFSUtils.html
@@ -0,0 +1,1435 @@
+CommonFSUtils (Apache HBase 3.0.0-SNAPSHOT API)
+org.apache.hadoop.hbase.util
+Class CommonFSUtils
+
+java.lang.Object
+  org.apache.hadoop.hbase.util.CommonFSUtils
+
+Direct Known Subclasses:
+FSUtils
+
+@InterfaceAudience.Private
+public abstract class CommonFSUtils
+extends java.lang.Object
+
+Utility methods for interacting with the underlying file system.
+
+
+
+
+
+
+
+
+
+
+
+Nested Class Summary
+
+Nested Classes
+
+Modifier and Type       Class and Description
+
+private static class    CommonFSUtils.StreamCapabilities
+
+static class            CommonFSUtils.StreamLacksCapabilityException
+                        Helper exception for those cases where the place where we need to check a
+                        stream capability is not where we have the needed context to explain the
+                        impact and mitigation for a lack.
+
+
+
+
+
+
+
+
+Field Summary
+
+Fields
+
+Modifier and Type                              Field and Description
+
+static java.lang.String                        FULL_RWX_PERMISSIONS
+                                               Full access permissions (starting point for a umask)
+
+static java.lang.String                        HBASE_WAL_DIR
+                                               Parameter name for HBase WAL directory
+
+private static org.apache.commons.logging.Log  LOG
+
+private static java.util.Map<...,Boolean>      warningMap
+
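Of these fields, HBASE_WAL_DIR is the one that moved in this publication
(compare the HFileSystem hunk earlier, where its value was "hbase.wal.dir"). A
hedged sketch of pointing WALs at their own filesystem via this key; the HDFS
URI is illustrative:

    Configuration conf = HBaseConfiguration.create();
    // Same key that used to live on HFileSystem, now on CommonFSUtils.
    conf.set(CommonFSUtils.HBASE_WAL_DIR, "hdfs://walcluster:8020/hbasewal");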
+
+
+
+
+
+
+
+
+Constructor Summary
+
+Constructors 
+
+Modifier
+Constructor and Description
+
+
+protected 
+CommonFSUtils() 
+
+
+
+
+
+
+
+
+
+Method Summary
+
+All Methods Static Methods Concrete Methods 
+
+Modifier and Type
+Method and Description
+
+
+static void
+checkShortCircuitReadBufferSize(org.apache.hadoop.conf.Configuration conf)
+Check if short circuit read buffer size is set and if not, set it to hbase value.
+
+static org.apache.hadoop.fs.FSDataOutputStream
+create(org.apache.hadoop.fs.FileSystem fs,
+       org.apache.hadoop.fs.Path path,
+       org.apache.hadoop.fs.permission.FsPermission perm,
+       boolean overwrite)
+Create the specified file on the filesystem.
+
+static boolean
+delete(org.apache.hadoop.fs.FileSystem fs,
+       org.apache.hadoop.fs.Path path,
+       boolean recursive)
+Calls fs.delete() and returns the value returned by the fs.delete()
+
+static boolean
+deleteDirectory(org.apache.hadoop.fs.FileSystem fs,
+                org.apache.hadoop.fs.Path dir)
+Delete if exists.
+
+static org.apache.hadoop.fs.FileSystem
+getCurrentFileSystem(org.apache.hadoop.conf.Configuration conf)
+
+static long
+getDefaultBlockSize(org.apache.hadoop.f
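A minimal end-to-end sketch against the static helpers listed so far; only
create/delete/deleteDirectory/getCurrentFileSystem are taken from the summary
above, and the path under /tmp is illustrative (note the class is
@InterfaceAudience.Private, so this is internal API):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;
    import org.apache.hadoop.hbase.util.CommonFSUtils;

    public class CommonFSUtilsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = CommonFSUtils.getCurrentFileSystem(conf);
        Path dir = new Path("/tmp/commonfsutils-example");
        Path file = new Path(dir, "data.bin");
        // create(fs, path, perm, overwrite): open, then close, the new file.
        CommonFSUtils.create(fs, file, new FsPermission((short) 0700), true).close();
        // delete mirrors fs.delete(); deleteDirectory is "delete if exists".
        CommonFSUtils.delete(fs, file, false);
        CommonFSUtils.deleteDirectory(fs, dir);
      }
    }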