hbase git commit: HBASE-18444 Add support for specifying custom meta table suffix - revert due to change in TestTableName

2017-09-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18477 8fb2c9a46 -> 755f5a133


HBASE-18444 Add support for specifying custom meta table suffix - revert due to 
change in TestTableName


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/755f5a13
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/755f5a13
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/755f5a13

Branch: refs/heads/HBASE-18477
Commit: 755f5a1338891b2878acdfb6827f10fb33af8aef
Parents: 8fb2c9a
Author: tedyu 
Authored: Wed Sep 6 18:29:11 2017 -0700
Committer: tedyu 
Committed: Wed Sep 6 18:29:11 2017 -0700

--
 .../java/org/apache/hadoop/hbase/TableName.java | 39 ++--
 .../apache/hadoop/hbase/util/TestTableName.java | 47 +---
 2 files changed, 6 insertions(+), 80 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/755f5a13/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
index 9addcd4..3477098 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
@@ -24,13 +24,9 @@ import java.util.Arrays;
 import java.util.Set;
 import java.util.concurrent.CopyOnWriteArraySet;
 
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 
 /**
  * Immutable POJO class for representing a table name.
@@ -58,7 +54,6 @@ import org.apache.hadoop.hbase.util.Bytes;
  */
 @InterfaceAudience.Public
 public final class TableName implements Comparable {
-  private static final Log LOG = LogFactory.getLog(TableName.class);
 
   /** See {@link #createTableNameIfNecessary(ByteBuffer, ByteBuffer)} */
   private static final Set tableCache = new CopyOnWriteArraySet<>();
@@ -82,11 +77,9 @@ public final class TableName implements 
Comparable {
   "(?:(?:(?:"+VALID_NAMESPACE_REGEX+"\\"+NAMESPACE_DELIM+")?)" +
  "(?:"+VALID_TABLE_QUALIFIER_REGEX+"))";
 
-  public static final String DEFAULT_META_TABLE_NAME_STR = "meta";
-  public static final String META_TABLE_SUFFIX = "hbase.meta.table.suffix";
-
-  /** The meta table's name. */
-  public static final TableName META_TABLE_NAME = 
getMetaTableName(HBaseConfiguration.create());
+  /** The hbase:meta table's name. */
+  public static final TableName META_TABLE_NAME =
+  valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "meta");
 
   /** The Namespace table's name. */
   public static final TableName NAMESPACE_TABLE_NAME =
@@ -558,28 +551,4 @@ public final class TableName implements 
Comparable {
 }
 return KeyValue.COMPARATOR;
   }
-
-  public static TableName getMetaTableName(Configuration conf) {
-String metaTableName = DEFAULT_META_TABLE_NAME_STR;
-String metaTableSuffix = conf.get(META_TABLE_SUFFIX, "");
-
-if(isValidMetaTableSuffix(metaTableSuffix)) {
-  metaTableName = DEFAULT_META_TABLE_NAME_STR + "_" + metaTableSuffix;
-}
-return (valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, 
metaTableName));
-  }
-
-  public static boolean isValidMetaTableSuffix(String metaTableSuffix) {
-if(StringUtils.isBlank(metaTableSuffix)) {
-  return false;
-}
-
-try {
-  isLegalTableQualifierName(Bytes.toBytes(metaTableSuffix));
-} catch(IllegalArgumentException iae) {
-  LOG.warn("Invalid meta table suffix", iae);
-  return false;
-}
-return true;
-  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/755f5a13/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java
index 1453bd2..f585f47 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java
@@ -23,16 +23,12 @@ import java.util.Map;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertSame;
-import static org.junit.Assert.assertTrue;
 

hbase git commit: HBASE-18768 Move TestTableName to hbase-common from hbase-server

2017-09-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18477 755f5a133 -> e0d0beceb


HBASE-18768 Move TestTableName to hbase-common from hbase-server

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e0d0bece
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e0d0bece
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e0d0bece

Branch: refs/heads/HBASE-18477
Commit: e0d0beceb8ba18df3544ac4af8ef1f9d78b569ab
Parents: 755f5a1
Author: Michael Stack 
Authored: Wed Sep 6 13:25:26 2017 -0700
Committer: tedyu 
Committed: Wed Sep 6 18:29:33 2017 -0700

--
 .../org/apache/hadoop/hbase/TestTableName.java  | 191 +++
 .../TestCorruptedRegionStoreFile.java   |   2 +-
 .../TestScannerRetriableFailure.java|   2 +-
 .../security/access/TestAccessController2.java  |   2 +-
 .../access/TestCellACLWithMultipleVersions.java |   2 +-
 .../hbase/security/access/TestCellACLs.java |   2 +-
 .../access/TestScanEarlyTermination.java|   2 +-
 .../access/TestWithDisabledAuthorization.java   |   2 +-
 .../snapshot/TestSnapshotClientRetries.java |   2 +-
 .../apache/hadoop/hbase/util/TestTableName.java | 190 --
 10 files changed, 199 insertions(+), 198 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e0d0bece/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java
new file mode 100644
index 000..54e25e8
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java
@@ -0,0 +1,191 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.fail;
+
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestWatcher;
+import org.junit.runner.Description;
+
+/**
+ * Returns a {@code byte[]} containing the name of the currently running test 
method.
+ */
+@Category({MiscTests.class, MediumTests.class})
+public class TestTableName extends TestWatcher {
+  private TableName tableName;
+
+  /**
+   * Invoked when a test is about to start
+   */
+  @Override
+  protected void starting(Description description) {
+tableName = TableName.valueOf(description.getMethodName());
+  }
+
+  public TableName getTableName() {
+return tableName;
+  }
+
+  String emptyNames[] ={"", " "};
+  String invalidNamespace[] = {":a", "%:a"};
+  String legalTableNames[] = { "foo", "with-dash_under.dot", "_under_start_ok",
+  "with-dash.with_underscore", "02-01-2012.my_table_01-02", 
"xyz._mytable_", "9_9_0.table_02"
+  , "dot1.dot2.table", "new.-mytable", "with-dash.with.dot", "legal..t2", 
"legal..legal.t2",
+  "trailingdots..", "trailing.dots...", "ns:mytable", "ns:_mytable_", 
"ns:my_table_01-02"};
+  String illegalTableNames[] = { ".dot_start_illegal", "-dash_start_illegal", 
"spaces not ok",
+  "-dash-.start_illegal", "new.table with space", "01 .table", 
"ns:-illegaldash",
+  "new:.illegaldot", "new:illegalcolon1:", "new:illegalcolon1:2"};
+
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testInvalidNamespace() {
+for (String tn : invalidNamespace) {
+  TableName.isLegalFullyQualifiedTableName(Bytes.toBytes(tn));
+  fail("invalid namespace " + tn + " should have 

hbase git commit: HBASE-18768 Move TestTableName to hbase-common from hbase-server

2017-09-06 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/branch-2 e25401db4 -> 7592cb8d3


HBASE-18768 Move TestTableName to hbase-common from hbase-server

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7592cb8d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7592cb8d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7592cb8d

Branch: refs/heads/branch-2
Commit: 7592cb8d341ab747cb2fb1a2a12119d230b24557
Parents: e25401d
Author: Michael Stack 
Authored: Wed Sep 6 13:25:26 2017 -0700
Committer: Michael Stack 
Committed: Wed Sep 6 18:12:14 2017 -0700

--
 .../org/apache/hadoop/hbase/TestTableName.java  | 191 +++
 .../TestCorruptedRegionStoreFile.java   |   2 +-
 .../TestScannerRetriableFailure.java|   2 +-
 .../security/access/TestAccessController2.java  |   2 +-
 .../access/TestCellACLWithMultipleVersions.java |   2 +-
 .../hbase/security/access/TestCellACLs.java |   2 +-
 .../access/TestScanEarlyTermination.java|   2 +-
 .../access/TestWithDisabledAuthorization.java   |   2 +-
 .../snapshot/TestSnapshotClientRetries.java |   2 +-
 .../apache/hadoop/hbase/util/TestTableName.java | 190 --
 10 files changed, 199 insertions(+), 198 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7592cb8d/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java
new file mode 100644
index 000..54e25e8
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java
@@ -0,0 +1,191 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.fail;
+
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestWatcher;
+import org.junit.runner.Description;
+
+/**
+ * Returns a {@code byte[]} containing the name of the currently running test 
method.
+ */
+@Category({MiscTests.class, MediumTests.class})
+public class TestTableName extends TestWatcher {
+  private TableName tableName;
+
+  /**
+   * Invoked when a test is about to start
+   */
+  @Override
+  protected void starting(Description description) {
+tableName = TableName.valueOf(description.getMethodName());
+  }
+
+  public TableName getTableName() {
+return tableName;
+  }
+
+  String emptyNames[] ={"", " "};
+  String invalidNamespace[] = {":a", "%:a"};
+  String legalTableNames[] = { "foo", "with-dash_under.dot", "_under_start_ok",
+  "with-dash.with_underscore", "02-01-2012.my_table_01-02", 
"xyz._mytable_", "9_9_0.table_02"
+  , "dot1.dot2.table", "new.-mytable", "with-dash.with.dot", "legal..t2", 
"legal..legal.t2",
+  "trailingdots..", "trailing.dots...", "ns:mytable", "ns:_mytable_", 
"ns:my_table_01-02"};
+  String illegalTableNames[] = { ".dot_start_illegal", "-dash_start_illegal", 
"spaces not ok",
+  "-dash-.start_illegal", "new.table with space", "01 .table", 
"ns:-illegaldash",
+  "new:.illegaldot", "new:illegalcolon1:", "new:illegalcolon1:2"};
+
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testInvalidNamespace() {
+for (String tn : invalidNamespace) {
+  TableName.isLegalFullyQualifiedTableName(Bytes.toBytes(tn));
+  fail("invalid namespace " + tn + " should have 

hbase git commit: HBASE-18768 Move TestTableName to hbase-common from hbase-server

2017-09-06 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/master fd49a9826 -> 3a9dc8fbd


HBASE-18768 Move TestTableName to hbase-common from hbase-server

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3a9dc8fb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3a9dc8fb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3a9dc8fb

Branch: refs/heads/master
Commit: 3a9dc8fbd58ab6dc4a895cbcf9644422fb5b8223
Parents: fd49a98
Author: Michael Stack 
Authored: Wed Sep 6 13:25:26 2017 -0700
Committer: Michael Stack 
Committed: Wed Sep 6 18:11:32 2017 -0700

--
 .../org/apache/hadoop/hbase/TestTableName.java  | 191 +++
 .../TestCorruptedRegionStoreFile.java   |   2 +-
 .../TestScannerRetriableFailure.java|   2 +-
 .../security/access/TestAccessController2.java  |   2 +-
 .../access/TestCellACLWithMultipleVersions.java |   2 +-
 .../hbase/security/access/TestCellACLs.java |   2 +-
 .../access/TestScanEarlyTermination.java|   2 +-
 .../access/TestWithDisabledAuthorization.java   |   2 +-
 .../snapshot/TestSnapshotClientRetries.java |   2 +-
 .../apache/hadoop/hbase/util/TestTableName.java | 190 --
 10 files changed, 199 insertions(+), 198 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3a9dc8fb/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java
new file mode 100644
index 000..54e25e8
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestTableName.java
@@ -0,0 +1,191 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.fail;
+
+import org.apache.hadoop.hbase.testclassification.MiscTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestWatcher;
+import org.junit.runner.Description;
+
+/**
+ * Returns a {@code byte[]} containing the name of the currently running test 
method.
+ */
+@Category({MiscTests.class, MediumTests.class})
+public class TestTableName extends TestWatcher {
+  private TableName tableName;
+
+  /**
+   * Invoked when a test is about to start
+   */
+  @Override
+  protected void starting(Description description) {
+tableName = TableName.valueOf(description.getMethodName());
+  }
+
+  public TableName getTableName() {
+return tableName;
+  }
+
+  String emptyNames[] ={"", " "};
+  String invalidNamespace[] = {":a", "%:a"};
+  String legalTableNames[] = { "foo", "with-dash_under.dot", "_under_start_ok",
+  "with-dash.with_underscore", "02-01-2012.my_table_01-02", 
"xyz._mytable_", "9_9_0.table_02"
+  , "dot1.dot2.table", "new.-mytable", "with-dash.with.dot", "legal..t2", 
"legal..legal.t2",
+  "trailingdots..", "trailing.dots...", "ns:mytable", "ns:_mytable_", 
"ns:my_table_01-02"};
+  String illegalTableNames[] = { ".dot_start_illegal", "-dash_start_illegal", 
"spaces not ok",
+  "-dash-.start_illegal", "new.table with space", "01 .table", 
"ns:-illegaldash",
+  "new:.illegaldot", "new:illegalcolon1:", "new:illegalcolon1:2"};
+
+
+  @Test(expected = IllegalArgumentException.class)
+  public void testInvalidNamespace() {
+for (String tn : invalidNamespace) {
+  TableName.isLegalFullyQualifiedTableName(Bytes.toBytes(tn));
+  fail("invalid namespace " + tn + " should have 

hbase git commit: HBASE-18720 [C++] Update wangle dependency to pick up the new release with Apache License v2

2017-09-06 Thread enis
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14850 63374e779 -> 2b5c8e27c


HBASE-18720 [C++] Update wangle dependency to pick up the new release with 
Apache License v2


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2b5c8e27
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2b5c8e27
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2b5c8e27

Branch: refs/heads/HBASE-14850
Commit: 2b5c8e27c1094fc8c88b0797047b8e4bec7a3d86
Parents: 63374e7
Author: Enis Soztutar 
Authored: Wed Sep 6 16:29:18 2017 -0700
Committer: Enis Soztutar 
Committed: Wed Sep 6 16:29:46 2017 -0700

--
 hbase-native-client/connection/rpc-test-server.cc | 2 +-
 hbase-native-client/docker-files/Dockerfile   | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2b5c8e27/hbase-native-client/connection/rpc-test-server.cc
--
diff --git a/hbase-native-client/connection/rpc-test-server.cc 
b/hbase-native-client/connection/rpc-test-server.cc
index 707bca7..157ea71 100644
--- a/hbase-native-client/connection/rpc-test-server.cc
+++ b/hbase-native-client/connection/rpc-test-server.cc
@@ -85,7 +85,7 @@ Future 
RpcTestService::operator()(std::unique_ptrset_resp_msg(pb_resp_msg);
 VLOG(1) << "RPC server:"
 << " error called.";
-
response->set_exception(folly::make_exception_wrapper("server 
error!"));
+response->set_exception(RpcTestException("server error!"));
 
   } else if (method_name == "pause") {
 auto pb_resp_msg = std::make_shared();

http://git-wip-us.apache.org/repos/asf/hbase/blob/2b5c8e27/hbase-native-client/docker-files/Dockerfile
--
diff --git a/hbase-native-client/docker-files/Dockerfile 
b/hbase-native-client/docker-files/Dockerfile
index efd9a9d..1265b83 100644
--- a/hbase-native-client/docker-files/Dockerfile
+++ b/hbase-native-client/docker-files/Dockerfile
@@ -86,7 +86,7 @@ RUN git clone https://github.com/google/protobuf.git 
/usr/src/protobuf && \
 
 # Update folly
 RUN cd /usr/src/ && \
-  ver=2017.06.19.00 && \
+  ver=2017.09.04.00 && \
   wget https://github.com/facebook/folly/archive/v$ver.tar.gz && \
   tar zxf v$ver.tar.gz && \
   rm -rf v$ver.tar.gz && \
@@ -106,7 +106,7 @@ RUN cd /usr/src/ && \
 
 # Update wangle
 RUN cd /usr/src/ && \
-  ver=2017.06.26.00 && \
+  ver=2017.09.04.00 && \
   wget https://github.com/facebook/wangle/archive/v$ver.tar.gz && \
   tar zxf v$ver.tar.gz && \
   rm -rf v$ver.tar.gz && \



hbase git commit: HBASE-18652 Expose individual cache stats in a CombinedCache through JMX

2017-09-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 bc3c72927 -> 1cca36b3b


HBASE-18652 Expose individual cache stats in a CombinedCache through JMX

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1cca36b3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1cca36b3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1cca36b3

Branch: refs/heads/branch-1.4
Commit: 1cca36b3b57067b6f550792433f28427f9f31fcd
Parents: bc3c729
Author: Biju Nair 
Authored: Wed Sep 6 16:07:19 2017 -0400
Committer: tedyu 
Committed: Wed Sep 6 13:31:55 2017 -0700

--
 .../regionserver/MetricsRegionServerSource.java | 17 +
 .../MetricsRegionServerWrapper.java | 40 +++
 .../MetricsRegionServerSourceImpl.java  | 16 +
 .../hadoop/hbase/io/hfile/CacheConfig.java  | 28 ++--
 .../MetricsRegionServerWrapperImpl.java | 71 +++-
 .../MetricsRegionServerWrapperStub.java | 40 +++
 .../regionserver/TestMetricsRegionServer.java   |  8 +++
 7 files changed, 214 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1cca36b3/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
index 3ac678e..b72deb8 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
@@ -345,6 +345,23 @@ public interface MetricsRegionServerSource extends 
BaseSource, JvmPauseMonitorSo
   String BLOCK_CACHE_DELETE_FAMILY_BLOOM_HIT_COUNT = 
"blockCacheDeleteFamilyBloomHitCount";
   String BLOCK_CACHE_TRAILER_HIT_COUNT = "blockCacheTrailerHitCount";
 
+  String L1_CACHE_HIT_COUNT = "l1CacheHitCount";
+  String L1_CACHE_HIT_COUNT_DESC = "L1 cache hit count.";
+  String L1_CACHE_MISS_COUNT = "l1CacheMissCount";
+  String L1_CACHE_MISS_COUNT_DESC = "L1 cache miss count.";
+  String L1_CACHE_HIT_RATIO = "l1CacheHitRatio";
+  String L1_CACHE_HIT_RATIO_DESC = "L1 cache hit ratio.";
+  String L1_CACHE_MISS_RATIO = "l1CacheMissRatio";
+  String L1_CACHE_MISS_RATIO_DESC = "L1 cache miss ratio.";
+  String L2_CACHE_HIT_COUNT = "l2CacheHitCount";
+  String L2_CACHE_HIT_COUNT_DESC = "L2 cache hit count.";
+  String L2_CACHE_MISS_COUNT = "l2CacheMissCount";
+  String L2_CACHE_MISS_COUNT_DESC = "L2 cache miss count.";
+  String L2_CACHE_HIT_RATIO = "l2CacheHitRatio";
+  String L2_CACHE_HIT_RATIO_DESC = "L2 cache hit ratio.";
+  String L2_CACHE_MISS_RATIO = "l2CacheMissRatio";
+  String L2_CACHE_MISS_RATIO_DESC = "L2 cache miss ratio.";
+
   String RS_START_TIME_NAME = "regionServerStartTime";
   String ZOOKEEPER_QUORUM_NAME = "zookeeperQuorum";
   String SERVER_NAME_NAME = "serverName";

http://git-wip-us.apache.org/repos/asf/hbase/blob/1cca36b3/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
index 7d7f66d..3344dce 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
@@ -259,6 +259,46 @@ public interface MetricsRegionServerWrapper {
   long getBlockCacheFailedInsertions();
 
   /**
+   * Hit count of L1 cache.
+   */
+  public long getL1CacheHitCount();
+
+  /**
+   * Miss count of L1 cache.
+   */
+  public long getL1CacheMissCount();
+
+  /**
+   * Hit ratio of L1 cache.
+   */
+  public double getL1CacheHitRatio();
+
+  /**
+   * Miss ratio of L1 cache.
+   */
+  public double getL1CacheMissRatio();
+
+  /**
+   * Hit count of L2 cache.
+   */
+  public long getL2CacheHitCount();
+
+  /**
+   * Miss count of L2 cache.
+   */
+  public long getL2CacheMissCount();
+
+  /**
+   * Hit ratio of L2 cache.
+   */
+  public double getL2CacheHitRatio();
+
+  /**
+   * Miss ratio of L2 cache.
+   */
+  public double getL2CacheMissRatio();
+
+  /**
* Force a re-computation of the metrics.
*/
   void forceRecompute();


hbase git commit: HBASE-18652 Expose individual cache stats in a CombinedCache through JMX

2017-09-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 5de4a7d89 -> 5bfe1da98


HBASE-18652 Expose individual cache stats in a CombinedCache through JMX

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5bfe1da9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5bfe1da9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5bfe1da9

Branch: refs/heads/branch-1
Commit: 5bfe1da9847b88e1ee81bc5e166faf493f042d2d
Parents: 5de4a7d
Author: Biju Nair 
Authored: Wed Sep 6 16:07:19 2017 -0400
Committer: tedyu 
Committed: Wed Sep 6 13:30:44 2017 -0700

--
 .../regionserver/MetricsRegionServerSource.java | 17 +
 .../MetricsRegionServerWrapper.java | 40 +++
 .../MetricsRegionServerSourceImpl.java  | 16 +
 .../hadoop/hbase/io/hfile/CacheConfig.java  | 28 ++--
 .../MetricsRegionServerWrapperImpl.java | 71 +++-
 .../MetricsRegionServerWrapperStub.java | 40 +++
 .../regionserver/TestMetricsRegionServer.java   |  8 +++
 7 files changed, 214 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5bfe1da9/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
index 3ac678e..b72deb8 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
@@ -345,6 +345,23 @@ public interface MetricsRegionServerSource extends 
BaseSource, JvmPauseMonitorSo
   String BLOCK_CACHE_DELETE_FAMILY_BLOOM_HIT_COUNT = 
"blockCacheDeleteFamilyBloomHitCount";
   String BLOCK_CACHE_TRAILER_HIT_COUNT = "blockCacheTrailerHitCount";
 
+  String L1_CACHE_HIT_COUNT = "l1CacheHitCount";
+  String L1_CACHE_HIT_COUNT_DESC = "L1 cache hit count.";
+  String L1_CACHE_MISS_COUNT = "l1CacheMissCount";
+  String L1_CACHE_MISS_COUNT_DESC = "L1 cache miss count.";
+  String L1_CACHE_HIT_RATIO = "l1CacheHitRatio";
+  String L1_CACHE_HIT_RATIO_DESC = "L1 cache hit ratio.";
+  String L1_CACHE_MISS_RATIO = "l1CacheMissRatio";
+  String L1_CACHE_MISS_RATIO_DESC = "L1 cache miss ratio.";
+  String L2_CACHE_HIT_COUNT = "l2CacheHitCount";
+  String L2_CACHE_HIT_COUNT_DESC = "L2 cache hit count.";
+  String L2_CACHE_MISS_COUNT = "l2CacheMissCount";
+  String L2_CACHE_MISS_COUNT_DESC = "L2 cache miss count.";
+  String L2_CACHE_HIT_RATIO = "l2CacheHitRatio";
+  String L2_CACHE_HIT_RATIO_DESC = "L2 cache hit ratio.";
+  String L2_CACHE_MISS_RATIO = "l2CacheMissRatio";
+  String L2_CACHE_MISS_RATIO_DESC = "L2 cache miss ratio.";
+
   String RS_START_TIME_NAME = "regionServerStartTime";
   String ZOOKEEPER_QUORUM_NAME = "zookeeperQuorum";
   String SERVER_NAME_NAME = "serverName";

http://git-wip-us.apache.org/repos/asf/hbase/blob/5bfe1da9/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
index 7d7f66d..3344dce 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
@@ -259,6 +259,46 @@ public interface MetricsRegionServerWrapper {
   long getBlockCacheFailedInsertions();
 
   /**
+   * Hit count of L1 cache.
+   */
+  public long getL1CacheHitCount();
+
+  /**
+   * Miss count of L1 cache.
+   */
+  public long getL1CacheMissCount();
+
+  /**
+   * Hit ratio of L1 cache.
+   */
+  public double getL1CacheHitRatio();
+
+  /**
+   * Miss ratio of L1 cache.
+   */
+  public double getL1CacheMissRatio();
+
+  /**
+   * Hit count of L2 cache.
+   */
+  public long getL2CacheHitCount();
+
+  /**
+   * Miss count of L2 cache.
+   */
+  public long getL2CacheMissCount();
+
+  /**
+   * Hit ratio of L2 cache.
+   */
+  public double getL2CacheHitRatio();
+
+  /**
+   * Miss ratio of L2 cache.
+   */
+  public double getL2CacheMissRatio();
+
+  /**
* Force a re-computation of the metrics.
*/
   void forceRecompute();


hbase git commit: HBASE-18737 Display configured max size of cache on RS UI

2017-09-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 975c31b3b -> bc3c72927


HBASE-18737 Display configured max size of cache on RS UI

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/bc3c7292
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/bc3c7292
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/bc3c7292

Branch: refs/heads/branch-1.4
Commit: bc3c7292767392507e3ff2b5e3f0514ba0426436
Parents: 975c31b
Author: Biju Nair 
Authored: Wed Sep 6 12:16:05 2017 -0400
Committer: tedyu 
Committed: Wed Sep 6 12:59:08 2017 -0700

--
 .../hadoop/hbase/io/hfile/MemcachedBlockCache.java |  5 +
 .../hbase/tmpl/regionserver/BlockCacheTmpl.jamon   | 13 +
 .../hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon  |  6 +++---
 .../hbase/tmpl/regionserver/ServerMetricsTmpl.jamon|  4 +++-
 .../org/apache/hadoop/hbase/io/hfile/BlockCache.java   |  6 ++
 .../hadoop/hbase/io/hfile/CombinedBlockCache.java  |  5 +
 .../apache/hadoop/hbase/io/hfile/LruBlockCache.java|  2 ++
 .../hadoop/hbase/io/hfile/bucket/BucketCache.java  |  1 +
 .../hbase/regionserver/TestHeapMemoryManager.java  |  5 +
 9 files changed, 39 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/bc3c7292/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
--
diff --git 
a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
 
b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
index f50a117..4357262 100644
--- 
a/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
+++ 
b/hbase-external-blockcache/src/main/java/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.java
@@ -195,6 +195,11 @@ public class MemcachedBlockCache implements BlockCache {
   }
 
   @Override
+  public long getMaxSize() {
+return 0;
+  }
+
+  @Override
   public long getFreeSize() {
 return 0;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/bc3c7292/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
index 3d2606b..e7fd378 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/BlockCacheTmpl.jamon
@@ -326,25 +326,30 @@ are combined counts. Request count is sum of hits and 
misses.
 
 
 
-Count
+Cache Size Limit
+<% TraditionalBinaryPrefix.long2String(bc.getMaxSize(), "B", 1) 
%>
+Max size of cache
+
+
+Block Count
 <% String.format("%,d", bc.getBlockCount()) %>
 Count of Blocks
 
 <%if !bucketCache %>
 
-Count
+Data Block Count
 <% String.format("%,d", bc.getDataBlockCount()) %>
 Count of DATA Blocks
 
 
 
-Size
+Size of Blocks
 <% TraditionalBinaryPrefix.long2String(bc.getCurrentSize(), "B", 
1) %>
 Size of Blocks
 
 <%if !bucketCache %>
 
-Size
+Size of Data Blocks
 <% TraditionalBinaryPrefix.long2String(bc.getCurrentDataSize(), 
"B", 1) %>
 Size of DATA Blocks
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/bc3c7292/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
index b98f50d..a8d4003 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RSStatusTmpl.jamon
@@ -109,12 +109,12 @@ org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
 
 
 
-<& ../common/TaskMonitorTmpl; filter = filter &>
+Block Cache
+<& BlockCacheTmpl; cacheConfig = regionServer.getCacheConfig(); config = 
regionServer.getConfiguration() &>
 
 
 
-Block Cache
-<& BlockCacheTmpl; cacheConfig = regionServer.getCacheConfig(); config = 
regionServer.getConfiguration() &>
+<& 

hbase git commit: HBASE-18444 Add support for specifying custom meta table suffix (Ajay Jadhav)

2017-09-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/HBASE-18477 [created] 8fb2c9a46


HBASE-18444 Add support for specifying custom meta table suffix (Ajay Jadhav)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8fb2c9a4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8fb2c9a4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8fb2c9a4

Branch: refs/heads/HBASE-18477
Commit: 8fb2c9a468cceebc95febc1e7259d8e40fdd0271
Parents: 7fb52e7
Author: tedyu 
Authored: Wed Sep 6 10:00:55 2017 -0700
Committer: tedyu 
Committed: Wed Sep 6 10:00:55 2017 -0700

--
 .../java/org/apache/hadoop/hbase/TableName.java | 39 ++--
 .../apache/hadoop/hbase/util/TestTableName.java | 47 +++-
 2 files changed, 80 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8fb2c9a4/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
index 3477098..9addcd4 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/TableName.java
@@ -24,9 +24,13 @@ import java.util.Arrays;
 import java.util.Set;
 import java.util.concurrent.CopyOnWriteArraySet;
 
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.KeyValue.KVComparator;
 
 /**
  * Immutable POJO class for representing a table name.
@@ -54,6 +58,7 @@ import org.apache.hadoop.hbase.KeyValue.KVComparator;
  */
 @InterfaceAudience.Public
 public final class TableName implements Comparable {
+  private static final Log LOG = LogFactory.getLog(TableName.class);
 
   /** See {@link #createTableNameIfNecessary(ByteBuffer, ByteBuffer)} */
   private static final Set tableCache = new CopyOnWriteArraySet<>();
@@ -77,9 +82,11 @@ public final class TableName implements 
Comparable {
   "(?:(?:(?:"+VALID_NAMESPACE_REGEX+"\\"+NAMESPACE_DELIM+")?)" +
  "(?:"+VALID_TABLE_QUALIFIER_REGEX+"))";
 
-  /** The hbase:meta table's name. */
-  public static final TableName META_TABLE_NAME =
-  valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, "meta");
+  public static final String DEFAULT_META_TABLE_NAME_STR = "meta";
+  public static final String META_TABLE_SUFFIX = "hbase.meta.table.suffix";
+
+  /** The meta table's name. */
+  public static final TableName META_TABLE_NAME = 
getMetaTableName(HBaseConfiguration.create());
 
   /** The Namespace table's name. */
   public static final TableName NAMESPACE_TABLE_NAME =
@@ -551,4 +558,28 @@ public final class TableName implements 
Comparable {
 }
 return KeyValue.COMPARATOR;
   }
+
+  public static TableName getMetaTableName(Configuration conf) {
+String metaTableName = DEFAULT_META_TABLE_NAME_STR;
+String metaTableSuffix = conf.get(META_TABLE_SUFFIX, "");
+
+if(isValidMetaTableSuffix(metaTableSuffix)) {
+  metaTableName = DEFAULT_META_TABLE_NAME_STR + "_" + metaTableSuffix;
+}
+return (valueOf(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR, 
metaTableName));
+  }
+
+  public static boolean isValidMetaTableSuffix(String metaTableSuffix) {
+if(StringUtils.isBlank(metaTableSuffix)) {
+  return false;
+}
+
+try {
+  isLegalTableQualifierName(Bytes.toBytes(metaTableSuffix));
+} catch(IllegalArgumentException iae) {
+  LOG.warn("Invalid meta table suffix", iae);
+  return false;
+}
+return true;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/8fb2c9a4/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java
index f585f47..1453bd2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestTableName.java
@@ -23,12 +23,16 @@ import java.util.Map;
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
 import static 

hbase git commit: HBASE-18697 Replace hbase-shaded-server jar with hbase-shaded-mapreduce jar.

2017-09-06 Thread appy
Repository: hbase
Updated Branches:
  refs/heads/branch-2 68ec2a9da -> e25401db4


HBASE-18697 Replace hbase-shaded-server jar with hbase-shaded-mapreduce jar.

Change-Id: I08b1af860c743249885adc0ba21885dd10f32d96


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e25401db
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e25401db
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e25401db

Branch: refs/heads/branch-2
Commit: e25401db493b523d654d27ce333ec9f32a13e417
Parents: 68ec2a9
Author: Apekshit Sharma 
Authored: Tue Aug 29 10:27:21 2017 -0700
Committer: Apekshit Sharma 
Committed: Wed Sep 6 08:58:39 2017 -0700

--
 .../hbase-shaded-check-invariants/pom.xml   |   2 +-
 hbase-shaded/hbase-shaded-mapreduce/pom.xml | 135 +++
 hbase-shaded/hbase-shaded-server/pom.xml| 135 ---
 hbase-shaded/pom.xml|   2 +-
 4 files changed, 137 insertions(+), 137 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e25401db/hbase-shaded/hbase-shaded-check-invariants/pom.xml
--
diff --git a/hbase-shaded/hbase-shaded-check-invariants/pom.xml 
b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
index 4079131..dd7f112 100644
--- a/hbase-shaded/hbase-shaded-check-invariants/pom.xml
+++ b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
@@ -41,7 +41,7 @@
 
 
   org.apache.hbase
-  hbase-shaded-server
+  hbase-shaded-mapreduce
   ${project.version}
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/e25401db/hbase-shaded/hbase-shaded-mapreduce/pom.xml
--
diff --git a/hbase-shaded/hbase-shaded-mapreduce/pom.xml 
b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
new file mode 100644
index 000..02b2d8a
--- /dev/null
+++ b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
@@ -0,0 +1,135 @@
+http://maven.apache.org/POM/4.0.0;
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance;
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd;>
+
+4.0.0
+
+hbase-shaded
+org.apache.hbase
+2.0.0-alpha3-SNAPSHOT
+..
+
+hbase-shaded-mapreduce
+Apache HBase - Shaded - MapReduce
+
+
+
+org.apache.maven.plugins
+maven-site-plugin
+
+true
+
+
+
+
+maven-assembly-plugin
+
+true
+
+
+
+
+
+
+org.apache.hbase
+hbase-mapreduce
+
+  
+  
+javax.xml.bind
+jaxb-api
+  
+  
+  
+javax.ws.rs
+javax.ws.rs-api
+  
+  
+com.sun.jersey
+jersey-server
+  
+  
+com.sun.jersey
+jersey-client
+  
+  
+com.sun.jersey
+jersey-core
+  
+  
+com.sun.jersey
+jersey-json
+  
+  
+com.sun.jersey.contribs
+jersey-guice
+  
+  
+  
+org.eclipse.jetty
+jetty-server
+  
+  
+org.eclipse.jetty
+jetty-servlet
+  
+  
+org.eclipse.jetty
+jetty-util
+  
+  
+org.eclipse.jetty
+jetty-util-ajax
+  
+  
+org.eclipse.jetty
+jetty-jsp
+  
+  
+org.eclipse.jetty
+jetty-webapp
+  
+  
+org.glassfish.jersey.containers
+jersey-container-servlet-core
+  
+  
+org.glassfish.jersey.media
+jersey-media-json-jackson1
+  
+
+
+
+
+
+
+release
+
+
+
+org.apache.maven.plugins
+maven-shade-plugin
+
+
+
+
+
+

http://git-wip-us.apache.org/repos/asf/hbase/blob/e25401db/hbase-shaded/hbase-shaded-server/pom.xml

hbase git commit: HBASE-18375: Fix the bug where the pool chunks from ChunkCreator are deallocated and not returned to pool, because there is no reference to them

2017-09-06 Thread anastasia
Repository: hbase
Updated Branches:
  refs/heads/branch-2 d7a74a75a -> 68ec2a9da


HBASE-18375: Fix the bug where the pool chunks from ChunkCreator are 
deallocated and not returned to pool, because there is no reference to them


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/68ec2a9d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/68ec2a9d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/68ec2a9d

Branch: refs/heads/branch-2
Commit: 68ec2a9da022f7824e9a45ef89a0c4b8bcb838f3
Parents: d7a74a7
Author: anastas 
Authored: Wed Sep 6 18:48:53 2017 +0300
Committer: anastas 
Committed: Wed Sep 6 18:48:53 2017 +0300

--
 .../hadoop/hbase/regionserver/ChunkCreator.java | 127 ---
 .../hbase/regionserver/CompactingMemStore.java  |  24 +++-
 .../hbase/regionserver/CompactionPipeline.java  |   4 +-
 .../hbase/regionserver/MemStoreLABImpl.java |  10 ++
 .../hbase/regionserver/TestMemStoreLAB.java |  12 +-
 .../TestMemstoreLABWithoutPool.java |   3 +-
 6 files changed, 124 insertions(+), 56 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/68ec2a9d/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
index 38d7136..61cf2b3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ChunkCreator.java
@@ -50,10 +50,9 @@ public class ChunkCreator {
   // monotonically increasing chunkid
   private AtomicInteger chunkID = new AtomicInteger(1);
   // maps the chunk against the monotonically increasing chunk id. We need to 
preserve the
-  // natural ordering of the key
-  // CellChunkMap creation should convert the soft ref to hard reference
-  private Map chunkIdMap =
-  new ConcurrentHashMap();
+  // natural ordering of the key. It also helps to protect from GC.
+  private Map chunkIdMap = new ConcurrentHashMap();
+
   private final int chunkSize;
   private final boolean offheap;
   @VisibleForTesting
@@ -75,7 +74,7 @@ public class ChunkCreator {
   }
 
   /**
-   * Initializes the instance of MSLABChunkCreator
+   * Initializes the instance of ChunkCreator
* @param chunkSize the chunkSize
* @param offheap indicates if the chunk is to be created offheap or not
* @param globalMemStoreSize  the global memstore size
@@ -100,10 +99,19 @@ public class ChunkCreator {
   }
 
   /**
-   * Creates and inits a chunk.
+   * Creates and inits a chunk. The default implementation.
* @return the chunk that was initialized
*/
   Chunk getChunk() {
+return getChunk(CompactingMemStore.IndexType.ARRAY_MAP);
+  }
+
+  /**
+   * Creates and inits a chunk.
+   * @return the chunk that was initialized
+   * @param chunkIndexType whether the requested chunk is going to be used 
with CellChunkMap index
+   */
+  Chunk getChunk(CompactingMemStore.IndexType chunkIndexType) {
 Chunk chunk = null;
 if (pool != null) {
   //  the pool creates the chunk internally. The chunk#init() call happens 
here
@@ -117,44 +125,49 @@ public class ChunkCreator {
   }
 }
 if (chunk == null) {
-  chunk = createChunk();
+  // the second boolean parameter means:
+  // if CellChunkMap index is requested, put allocated on demand chunk 
mapping into chunkIdMap
+  chunk = createChunk(false, chunkIndexType);
 }
-// put this chunk into the chunkIdMap
-this.chunkIdMap.put(chunk.getId(), new SoftReference<>(chunk));
+
 // now we need to actually do the expensive memory allocation step in case 
of a new chunk,
 // else only the offset is set to the beginning of the chunk to accept 
allocations
 chunk.init();
 return chunk;
   }
 
-  private Chunk createChunk() {
-return createChunk(false);
+  private Chunk createChunkForPool() {
+return createChunk(true, CompactingMemStore.IndexType.ARRAY_MAP);
   }
 
   /**
* Creates the chunk either onheap or offheap
* @param pool indicates if the chunks have to be created which will be used 
by the Pool
+   * @param chunkIndexType
* @return the chunk
*/
-  private Chunk createChunk(boolean pool) {
+  private Chunk createChunk(boolean pool, CompactingMemStore.IndexType 
chunkIndexType) {
+Chunk chunk = null;
 int id = chunkID.getAndIncrement();
 assert id > 0;
 // do not create offheap chunk on demand
 if (pool && this.offheap) 

hbase git commit: HBASE-18697 Replace hbase-shaded-server jar with hbase-shaded-mapreduce jar.

2017-09-06 Thread appy
Repository: hbase
Updated Branches:
  refs/heads/master 7fb52e73f -> fd49a9826


HBASE-18697 Replace hbase-shaded-server jar with hbase-shaded-mapreduce jar.

Change-Id: I08b1af860c743249885adc0ba21885dd10f32d96


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fd49a982
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fd49a982
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fd49a982

Branch: refs/heads/master
Commit: fd49a9826a4bb5dc3783b40763144e6395dde1e5
Parents: 7fb52e7
Author: Apekshit Sharma 
Authored: Tue Aug 29 10:27:21 2017 -0700
Committer: Apekshit Sharma 
Committed: Wed Sep 6 08:28:06 2017 -0700

--
 .../hbase-shaded-check-invariants/pom.xml   |   2 +-
 hbase-shaded/hbase-shaded-mapreduce/pom.xml | 135 +++
 hbase-shaded/hbase-shaded-server/pom.xml| 135 ---
 hbase-shaded/pom.xml|   2 +-
 4 files changed, 137 insertions(+), 137 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fd49a982/hbase-shaded/hbase-shaded-check-invariants/pom.xml
--
diff --git a/hbase-shaded/hbase-shaded-check-invariants/pom.xml 
b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
index 3444199..69275a7 100644
--- a/hbase-shaded/hbase-shaded-check-invariants/pom.xml
+++ b/hbase-shaded/hbase-shaded-check-invariants/pom.xml
@@ -41,7 +41,7 @@
 
 
   org.apache.hbase
-  hbase-shaded-server
+  hbase-shaded-mapreduce
   ${project.version}
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/fd49a982/hbase-shaded/hbase-shaded-mapreduce/pom.xml
--
diff --git a/hbase-shaded/hbase-shaded-mapreduce/pom.xml 
b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
new file mode 100644
index 000..041ccfe
--- /dev/null
+++ b/hbase-shaded/hbase-shaded-mapreduce/pom.xml
@@ -0,0 +1,135 @@
+http://maven.apache.org/POM/4.0.0;
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance;
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd;>
+
+4.0.0
+
+hbase-shaded
+org.apache.hbase
+3.0.0-SNAPSHOT
+..
+
+hbase-shaded-mapreduce
+Apache HBase - Shaded - MapReduce
+
+
+
+org.apache.maven.plugins
+maven-site-plugin
+
+true
+
+
+
+
+maven-assembly-plugin
+
+true
+
+
+
+
+
+
+org.apache.hbase
+hbase-mapreduce
+
+  
+  
+javax.xml.bind
+jaxb-api
+  
+  
+  
+javax.ws.rs
+javax.ws.rs-api
+  
+  
+com.sun.jersey
+jersey-server
+  
+  
+com.sun.jersey
+jersey-client
+  
+  
+com.sun.jersey
+jersey-core
+  
+  
+com.sun.jersey
+jersey-json
+  
+  
+com.sun.jersey.contribs
+jersey-guice
+  
+  
+  
+org.eclipse.jetty
+jetty-server
+  
+  
+org.eclipse.jetty
+jetty-servlet
+  
+  
+org.eclipse.jetty
+jetty-util
+  
+  
+org.eclipse.jetty
+jetty-util-ajax
+  
+  
+org.eclipse.jetty
+jetty-jsp
+  
+  
+org.eclipse.jetty
+jetty-webapp
+  
+  
+org.glassfish.jersey.containers
+jersey-container-servlet-core
+  
+  
+org.glassfish.jersey.media
+jersey-media-json-jackson1
+  
+
+
+
+
+
+
+release
+
+
+
+org.apache.maven.plugins
+maven-shade-plugin
+
+
+
+
+
+

http://git-wip-us.apache.org/repos/asf/hbase/blob/fd49a982/hbase-shaded/hbase-shaded-server/pom.xml

[39/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html
index 5e8e1b3..a109a5c 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html
@@ -1274,7 +1274,7 @@ implements 
 
 CACHE_FIXED_OVERHEAD
-public static finallong CACHE_FIXED_OVERHEAD
+public static finallong CACHE_FIXED_OVERHEAD
 
 
 
@@ -1639,9 +1639,11 @@ implements 
 
 getMaxSize
-publiclonggetMaxSize()
+publiclonggetMaxSize()
 Get the maximum size of this cache.
 
+Specified by:
+getMaxSizein
 interfaceBlockCache
 Returns:
 max size in bytes
 
@@ -1653,7 +1655,7 @@ implements 
 
 getCurrentSize
-publiclonggetCurrentSize()
+publiclonggetCurrentSize()
 Description copied from 
interface:BlockCache
 Returns the occupied size of the block cache, in 
bytes.
 
@@ -1670,7 +1672,7 @@ implements 
 
 getCurrentDataSize
-publiclonggetCurrentDataSize()
+publiclonggetCurrentDataSize()
 Description copied from 
interface:BlockCache
 Returns the occupied size of data blocks, in bytes.
 
@@ -1687,7 +1689,7 @@ implements 
 
 getFreeSize
-publiclonggetFreeSize()
+publiclonggetFreeSize()
 Description copied from 
interface:BlockCache
 Returns the free size of the block cache, in bytes.
 
@@ -1704,7 +1706,7 @@ implements 
 
 size
-publiclongsize()
+publiclongsize()
 Description copied from 
interface:BlockCache
 Returns the total size of the block cache, in bytes.
 
@@ -1721,7 +1723,7 @@ implements 
 
 getBlockCount
-publiclonggetBlockCount()
+publiclonggetBlockCount()
 Description copied from 
interface:BlockCache
 Returns the number of blocks currently cached in the block 
cache.
 
@@ -1738,7 +1740,7 @@ implements 
 
 getDataBlockCount
-publiclonggetDataBlockCount()
+publiclonggetDataBlockCount()
 Description copied from 
interface:BlockCache
 Returns the number of data blocks currently cached in the 
block cache.
 
@@ -1755,7 +1757,7 @@ implements 
 
 getEvictionThread
-LruBlockCache.EvictionThreadgetEvictionThread()
+LruBlockCache.EvictionThreadgetEvictionThread()
 
 
 
@@ -1764,7 +1766,7 @@ implements 
 
 logStats
-publicvoidlogStats()
+publicvoidlogStats()
 
 
 
@@ -1773,7 +1775,7 @@ implements 
 
 getStats
-publicCacheStatsgetStats()
+publicCacheStatsgetStats()
 Get counter statistics for this cache.
 
  Includes: total accesses, hits, misses, evicted blocks, and runs
@@ -1792,7 +1794,7 @@ implements 
 
 heapSize
-publiclongheapSize()
+publiclongheapSize()
 
 Specified by:
 heapSizein
 interfaceHeapSize
@@ -1808,7 +1810,7 @@ implements 
 
 calculateOverhead
-private staticlongcalculateOverhead(longmaxSize,
+private staticlongcalculateOverhead(longmaxSize,
   longblockSize,
   intconcurrency)
 
@@ -1819,7 +1821,7 @@ implements 
 
 iterator
-publichttp://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorCachedBlockiterator()
+publichttp://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorCachedBlockiterator()
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true#iterator--;
 title="class or interface in java.lang">iteratorin 
interfacehttp://docs.oracle.com/javase/8/docs/api/java/lang/Iterable.html?is-external=true;
 title="class or interface in java.lang">IterableCachedBlock
@@ -1836,7 +1838,7 @@ implements 
 
 acceptableSize
-longacceptableSize()
+longacceptableSize()
 
 
 
@@ -1845,7 +1847,7 @@ implements 
 
 minSize
-privatelongminSize()
+privatelongminSize()
 
 
 
@@ -1854,7 +1856,7 @@ implements 
 
 singleSize
-privatelongsingleSize()
+privatelongsingleSize()
 
 
 
@@ -1863,7 +1865,7 @@ implements 
 
 multiSize
-privatelongmultiSize()
+privatelongmultiSize()
 
 
 
@@ -1872,7 +1874,7 @@ implements 
 
 memorySize
-privatelongmemorySize()
+privatelongmemorySize()
 
 
 
@@ -1881,7 +1883,7 @@ implements 
 
 shutdown
-publicvoidshutdown()
+publicvoidshutdown()
 Description copied from 
interface:BlockCache
 Shutdown the cache.
 
@@ -1896,7 +1898,7 @@ implements 
 
 clearCache
-publicvoidclearCache()
+publicvoidclearCache()
 Clears the cache. Used in tests.
 
 
@@ -1906,7 +1908,7 @@ implements 
 
 getCachedFileNamesForTest
-http://docs.oracle.com/javase/8/docs/api/java/util/SortedSet.html?is-external=true;
 title="class or interface in java.util">SortedSethttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetCachedFileNamesForTest()
+http://docs.oracle.com/javase/8/docs/api/java/util/SortedSet.html?is-external=true;
 title="class or interface in 

[19/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
index 8609df1..601ad09 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntryGroup.html
@@ -361,1266 +361,1267 @@
 353return this.cacheEnabled;
 354  }
 355
-356  public long getMaxSize() {
-357return this.cacheCapacity;
-358  }
-359
-360  public String getIoEngine() {
-361return ioEngine.toString();
-362  }
-363
-364  /**
-365   * Get the IOEngine from the IO engine 
name
-366   * @param ioEngineName
-367   * @param capacity
-368   * @param persistencePath
-369   * @return the IOEngine
-370   * @throws IOException
-371   */
-372  private IOEngine 
getIOEngineFromName(String ioEngineName, long capacity, String 
persistencePath)
-373  throws IOException {
-374if (ioEngineName.startsWith("file:") 
|| ioEngineName.startsWith("files:")) {
-375  // In order to make the usage 
simple, we only need the prefix 'files:' in
-376  // document whether one or multiple 
file(s), but also support 'file:' for
-377  // the compatibility
-378  String[] filePaths = 
ioEngineName.substring(ioEngineName.indexOf(":") + 1)
-379  
.split(FileIOEngine.FILE_DELIMITER);
-380  return new FileIOEngine(capacity, 
persistencePath != null, filePaths);
-381} else if 
(ioEngineName.startsWith("offheap")) {
-382  return new 
ByteBufferIOEngine(capacity, true);
-383} else if 
(ioEngineName.startsWith("heap")) {
-384  return new 
ByteBufferIOEngine(capacity, false);
-385} else if 
(ioEngineName.startsWith("mmap:")) {
-386  return new 
FileMmapEngine(ioEngineName.substring(5), capacity);
-387} else {
-388  throw new 
IllegalArgumentException(
-389  "Don't understand io engine 
name for cache - prefix with file:, heap or offheap");
-390}
-391  }
-392
-393  /**
-394   * Cache the block with the specified 
name and buffer.
-395   * @param cacheKey block's cache key
-396   * @param buf block buffer
-397   */
-398  @Override
-399  public void cacheBlock(BlockCacheKey 
cacheKey, Cacheable buf) {
-400cacheBlock(cacheKey, buf, false, 
false);
-401  }
-402
-403  /**
-404   * Cache the block with the specified 
name and buffer.
-405   * @param cacheKey block's cache key
-406   * @param cachedItem block buffer
-407   * @param inMemory if block is 
in-memory
-408   * @param cacheDataInL1
-409   */
-410  @Override
-411  public void cacheBlock(BlockCacheKey 
cacheKey, Cacheable cachedItem, boolean inMemory,
-412  final boolean cacheDataInL1) {
-413cacheBlockWithWait(cacheKey, 
cachedItem, inMemory, wait_when_cache);
-414  }
-415
-416  /**
-417   * Cache the block to ramCache
-418   * @param cacheKey block's cache key
-419   * @param cachedItem block buffer
-420   * @param inMemory if block is 
in-memory
-421   * @param wait if true, blocking wait 
when queue is full
-422   */
-423  public void 
cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean 
inMemory,
-424  boolean wait) {
-425if (LOG.isTraceEnabled()) 
LOG.trace("Caching key=" + cacheKey + ", item=" + cachedItem);
-426if (!cacheEnabled) {
-427  return;
-428}
-429
-430if (backingMap.containsKey(cacheKey)) 
{
-431  return;
-432}
-433
-434/*
-435 * Stuff the entry into the RAM cache 
so it can get drained to the persistent store
-436 */
-437RAMQueueEntry re =
-438new RAMQueueEntry(cacheKey, 
cachedItem, accessCount.incrementAndGet(), inMemory);
-439if (ramCache.putIfAbsent(cacheKey, 
re) != null) {
-440  return;
-441}
-442int queueNum = (cacheKey.hashCode() 
 0x7FFF) % writerQueues.size();
-443BlockingQueueRAMQueueEntry bq 
= writerQueues.get(queueNum);
-444boolean successfulAddition = false;
-445if (wait) {
-446  try {
-447successfulAddition = bq.offer(re, 
DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);
-448  } catch (InterruptedException e) 
{
-449
Thread.currentThread().interrupt();
-450  }
-451} else {
-452  successfulAddition = 
bq.offer(re);
-453}
-454if (!successfulAddition) {
-455  ramCache.remove(cacheKey);
-456  cacheStats.failInsert();
-457} else {
-458  
this.blockNumber.incrementAndGet();
-459  
this.heapSize.addAndGet(cachedItem.heapSize());
-460  blocksByHFile.add(cacheKey);
-461}
-462  }
-463
-464  /**
-465   * Get the buffer of the block with the 
specified key.
-466   * @param key block's cache key
-467   * @param caching true if the 

[41/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
index 2b43c26..95f80da 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Result.html
@@ -1998,7 +1998,7 @@ service.
 
 
 private void
-RSRpcServices.scan(HBaseRpcControllercontroller,
+RSRpcServices.scan(HBaseRpcControllercontroller,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequestrequest,
 RSRpcServices.RegionScannerHolderrsh,
 longmaxQuotaResultSize,
@@ -2006,7 +2006,7 @@ service.
 intlimitOfRows,
 http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListResultresults,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.Builderbuilder,
-org.apache.commons.lang.mutable.MutableObjectlastBlock,
+org.apache.commons.lang3.mutable.MutableObjectlastBlock,
 RpcCallContextcontext)
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html 
b/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
index 9ef46dd..149bfdf 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/class-use/Scan.html
@@ -2071,26 +2071,6 @@ service.
 
 
 boolean
-MutableSegment.shouldSeek(Scanscan,
-  longoldestUnexpiredTS)
-
-
-boolean
-ImmutableSegment.shouldSeek(Scanscan,
-  longoldestUnexpiredTS)
-
-
-abstract boolean
-Segment.shouldSeek(Scanscan,
-  longoldestUnexpiredTS)
-
-
-boolean
-CompositeImmutableSegment.shouldSeek(Scanscan,
-  longoldestUnexpiredTS)
-
-
-boolean
 StoreFileScanner.shouldUseScanner(Scanscan,
 Storestore,
 longoldestUnexpiredTS)

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index 99bcf7d..b3575c3 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -542,25 +542,25 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.client.MobCompactPartitionPolicy
-org.apache.hadoop.hbase.client.CompactionState
+org.apache.hadoop.hbase.client.MasterSwitchType
+org.apache.hadoop.hbase.client.SnapshotType
+org.apache.hadoop.hbase.client.RegionLocateType
 org.apache.hadoop.hbase.client.HBaseAdmin.ReplicationState
-org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows
+org.apache.hadoop.hbase.client.ScannerCallable.MoreResults
+org.apache.hadoop.hbase.client.IsolationLevel
 org.apache.hadoop.hbase.client.Consistency
+org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows
 org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState
-org.apache.hadoop.hbase.client.RegionLocateType
-org.apache.hadoop.hbase.client.TableState.State
-org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState
-org.apache.hadoop.hbase.client.ScannerCallable.MoreResults
-org.apache.hadoop.hbase.client.AbstractResponse.ResponseType
 org.apache.hadoop.hbase.client.CompactType
-org.apache.hadoop.hbase.client.Durability
 org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.Retry
 org.apache.hadoop.hbase.client.Scan.ReadType
+org.apache.hadoop.hbase.client.AbstractResponse.ResponseType
+org.apache.hadoop.hbase.client.TableState.State
+org.apache.hadoop.hbase.client.Durability
 org.apache.hadoop.hbase.client.RequestController.ReturnCode
-org.apache.hadoop.hbase.client.MasterSwitchType
-org.apache.hadoop.hbase.client.SnapshotType
-org.apache.hadoop.hbase.client.IsolationLevel
+org.apache.hadoop.hbase.client.CompactionState
+org.apache.hadoop.hbase.client.MobCompactPartitionPolicy
+org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState
 
 
 


[25/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
index 64bce33..f8a036f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.SimpleReporter.html
@@ -53,7 +53,7 @@
 045import org.apache.commons.cli.Options;
 046import 
org.apache.commons.cli.ParseException;
 047import 
org.apache.commons.cli.PosixParser;
-048import 
org.apache.commons.lang.StringUtils;
+048import 
org.apache.commons.lang3.StringUtils;
 049import org.apache.commons.logging.Log;
 050import 
org.apache.commons.logging.LogFactory;
 051import 
org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html
index 64bce33..f8a036f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.html
@@ -53,7 +53,7 @@
 045import org.apache.commons.cli.Options;
 046import 
org.apache.commons.cli.ParseException;
 047import 
org.apache.commons.cli.PosixParser;
-048import 
org.apache.commons.lang.StringUtils;
+048import 
org.apache.commons.lang3.StringUtils;
 049import org.apache.commons.logging.Log;
 050import 
org.apache.commons.logging.LogFactory;
 051import 
org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.BlockBucket.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.BlockBucket.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.BlockBucket.html
index 55c1e83..989423f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.BlockBucket.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.BlockBucket.html
@@ -848,375 +848,377 @@
 840   *
 841   * @return max size in bytes
 842   */
-843  public long getMaxSize() {
-844return this.maxSize;
-845  }
-846
-847  @Override
-848  public long getCurrentSize() {
-849return this.size.get();
-850  }
-851
-852  @Override
-853  public long getCurrentDataSize() {
-854return this.dataBlockSize.get();
-855  }
-856
-857  @Override
-858  public long getFreeSize() {
-859return getMaxSize() - 
getCurrentSize();
-860  }
-861
-862  @Override
-863  public long size() {
-864return getMaxSize();
-865  }
-866
-867  @Override
-868  public long getBlockCount() {
-869return this.elements.get();
-870  }
-871
-872  @Override
-873  public long getDataBlockCount() {
-874return 
this.dataBlockElements.get();
-875  }
-876
-877  EvictionThread getEvictionThread() {
-878return this.evictionThread;
-879  }
-880
-881  /*
-882   * Eviction thread.  Sits in waiting 
state until an eviction is triggered
-883   * when the cache size grows above the 
acceptable level.p
-884   *
-885   * Thread is triggered into action by 
{@link LruBlockCache#runEviction()}
-886   */
-887  static class EvictionThread extends 
HasThread {
-888
-889private 
WeakReferenceLruBlockCache cache;
-890private volatile boolean go = true;
-891// flag set after enter the run 
method, used for test
-892private boolean enteringRun = 
false;
-893
-894public EvictionThread(LruBlockCache 
cache) {
-895  
super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
-896  setDaemon(true);
-897  this.cache = new 
WeakReference(cache);
-898}
-899
-900@Override
-901public void run() {
-902  enteringRun = true;
-903  while (this.go) {
-904synchronized (this) {
-905  try {
-906this.wait(1000 * 10/*Don't 
wait for ever*/);
-907  } catch (InterruptedException 
e) {
-908LOG.warn("Interrupted 
eviction thread ", e);
-909
Thread.currentThread().interrupt();
-910  }
-911}
-912LruBlockCache cache = 
this.cache.get();
-913if (cache == null) break;
-914cache.evict();
-915  }
-916}
-917
-918
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NN_NAKED_NOTIFY",
-919justification="This is what we 
want")
-920public 

[36/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
index 5704d34..4e441e4 100644
--- a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
+++ b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
@@ -117,7 +117,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public abstract class BaseLoadBalancer
+public abstract class BaseLoadBalancer
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements LoadBalancer
 The base class for load balancers. It provides the the 
functions used to by
@@ -479,7 +479,7 @@ implements 
 
 MIN_SERVER_BALANCE
-protected static finalint MIN_SERVER_BALANCE
+protected static finalint MIN_SERVER_BALANCE
 
 See Also:
 Constant
 Field Values
@@ -492,7 +492,7 @@ implements 
 
 stopped
-private volatileboolean stopped
+private volatileboolean stopped
 
 
 
@@ -501,7 +501,7 @@ implements 
 
 EMPTY_REGION_LIST
-private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionInfo EMPTY_REGION_LIST
+private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListHRegionInfo EMPTY_REGION_LIST
 
 
 
@@ -510,7 +510,7 @@ implements 
 
 IDLE_SERVER_PREDICATOR
-static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/function/Predicate.html?is-external=true;
 title="class or interface in java.util.function">PredicateServerLoad IDLE_SERVER_PREDICATOR
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/function/Predicate.html?is-external=true;
 title="class or interface in java.util.function">PredicateServerLoad IDLE_SERVER_PREDICATOR
 
 
 
@@ -519,7 +519,7 @@ implements 
 
 regionFinder
-protected finalRegionLocationFinder regionFinder
+protected finalRegionLocationFinder regionFinder
 
 
 
@@ -528,7 +528,7 @@ implements 
 
 slop
-protectedfloat slop
+protectedfloat slop
 
 
 
@@ -537,7 +537,7 @@ implements 
 
 overallSlop
-protectedfloat overallSlop
+protectedfloat overallSlop
 
 
 
@@ -546,7 +546,7 @@ implements 
 
 config
-protectedorg.apache.hadoop.conf.Configuration config
+protectedorg.apache.hadoop.conf.Configuration config
 
 
 
@@ -555,7 +555,7 @@ implements 
 
 rackManager
-protectedRackManager rackManager
+protectedRackManager rackManager
 
 
 
@@ -564,7 +564,7 @@ implements 
 
 RANDOM
-private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true;
 title="class or interface in java.util">Random RANDOM
+private static finalhttp://docs.oracle.com/javase/8/docs/api/java/util/Random.html?is-external=true;
 title="class or interface in java.util">Random RANDOM
 
 
 
@@ -573,7 +573,7 @@ implements 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -582,7 +582,7 @@ implements 
 
 metricsBalancer
-protectedMetricsBalancer 
metricsBalancer
+protectedMetricsBalancer 
metricsBalancer
 
 
 
@@ -591,7 +591,7 @@ implements 
 
 clusterStatus
-protectedClusterStatus clusterStatus
+protectedClusterStatus clusterStatus
 
 
 
@@ -600,7 +600,7 @@ implements 
 
 masterServerName
-protectedServerName masterServerName
+protectedServerName masterServerName
 
 
 
@@ -609,7 +609,7 @@ implements 
 
 services
-protectedMasterServices services
+protectedMasterServices services
 
 
 
@@ -618,7 +618,7 @@ implements 
 
 tablesOnMaster
-protectedboolean tablesOnMaster
+protectedboolean tablesOnMaster
 
 
 
@@ -627,7 +627,7 @@ implements 
 
 onlySystemTablesOnMaster
-protectedboolean onlySystemTablesOnMaster
+protectedboolean onlySystemTablesOnMaster
 
 
 
@@ -644,7 +644,7 @@ implements 
 
 BaseLoadBalancer
-protectedBaseLoadBalancer()
+protectedBaseLoadBalancer()
 The constructor that uses the basic MetricsBalancer
 
 
@@ -654,7 +654,7 @@ implements 
 
 BaseLoadBalancer
-protectedBaseLoadBalancer(MetricsBalancermetricsBalancer)
+protectedBaseLoadBalancer(MetricsBalancermetricsBalancer)
 This Constructor accepts an instance of MetricsBalancer,
  which will be used instead of creating a new one
 
@@ -673,7 +673,7 @@ implements 
 
 setConf
-publicvoidsetConf(org.apache.hadoop.conf.Configurationconf)
+publicvoidsetConf(org.apache.hadoop.conf.Configurationconf)
 
 Specified by:
 setConfin 
interfaceorg.apache.hadoop.conf.Configurable
@@ -686,7 +686,7 @@ implements 
 
 setSlop
-protectedvoidsetSlop(org.apache.hadoop.conf.Configurationconf)
+protectedvoidsetSlop(org.apache.hadoop.conf.Configurationconf)
 
 
 
@@ -695,7 +695,7 @@ implements 
 
 shouldBeOnMaster

[02/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
index 9aa3f41..42f2cda 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
@@ -215,259 +215,300 @@
 207   */
 208  int getFlushQueueSize();
 209
-210  /**
-211   * Get the size (in bytes) of the block 
cache that is free.
-212   */
-213  long getBlockCacheFreeSize();
-214
-215  /**
-216   * Get the number of items in the block 
cache.
-217   */
-218  long getBlockCacheCount();
-219
-220  /**
-221   * Get the total size (in bytes) of the 
block cache.
-222   */
-223  long getBlockCacheSize();
-224
-225  /**
-226   * Get the count of hits to the block 
cache
-227   */
-228  long getBlockCacheHitCount();
-229
-230  /**
-231   * Get the count of hits to primary 
replica in the block cache
-232   */
-233  long getBlockCachePrimaryHitCount();
-234
-235  /**
-236   * Get the count of misses to the block 
cache.
-237   */
-238  long getBlockCacheMissCount();
-239
-240  /**
-241   * Get the count of misses to primary 
replica in the block cache.
-242   */
-243  long getBlockCachePrimaryMissCount();
-244
-245  /**
-246   * Get the number of items evicted from 
the block cache.
-247   */
-248  long getBlockCacheEvictedCount();
-249
-250  /**
-251   * Get the number of items evicted from 
primary replica in the block cache.
-252   */
-253  long 
getBlockCachePrimaryEvictedCount();
-254
+210  public long getMemstoreLimit();
+211  /**
+212   * Get the size (in bytes) of the block 
cache that is free.
+213   */
+214  long getBlockCacheFreeSize();
+215
+216  /**
+217   * Get the number of items in the block 
cache.
+218   */
+219  long getBlockCacheCount();
+220
+221  /**
+222   * Get the total size (in bytes) of the 
block cache.
+223   */
+224  long getBlockCacheSize();
+225
+226  /**
+227   * Get the count of hits to the block 
cache
+228   */
+229  long getBlockCacheHitCount();
+230
+231  /**
+232   * Get the count of hits to primary 
replica in the block cache
+233   */
+234  long getBlockCachePrimaryHitCount();
+235
+236  /**
+237   * Get the count of misses to the block 
cache.
+238   */
+239  long getBlockCacheMissCount();
+240
+241  /**
+242   * Get the count of misses to primary 
replica in the block cache.
+243   */
+244  long getBlockCachePrimaryMissCount();
+245
+246  /**
+247   * Get the number of items evicted from 
the block cache.
+248   */
+249  long getBlockCacheEvictedCount();
+250
+251  /**
+252   * Get the number of items evicted from 
primary replica in the block cache.
+253   */
+254  long 
getBlockCachePrimaryEvictedCount();
 255
-256  /**
-257   * Get the percent of all requests that 
hit the block cache.
-258   */
-259  double getBlockCacheHitPercent();
-260
-261  /**
-262   * Get the percent of requests with the 
block cache turned on that hit the block cache.
-263   */
-264  double 
getBlockCacheHitCachingPercent();
-265
-266  /**
-267   * Number of cache insertions that 
failed.
-268   */
-269  long getBlockCacheFailedInsertions();
-270
-271  /**
-272   * Force a re-computation of the 
metrics.
-273   */
-274  void forceRecompute();
-275
-276  /**
-277   * Get the amount of time that updates 
were blocked.
-278   */
-279  long getUpdatesBlockedTime();
-280
-281  /**
-282   * Get the number of cells flushed to 
disk.
-283   */
-284  long getFlushedCellsCount();
-285
-286  /**
-287   * Get the number of cells processed 
during minor compactions.
-288   */
-289  long getCompactedCellsCount();
-290
-291  /**
-292   * Get the number of cells processed 
during major compactions.
-293   */
-294  long getMajorCompactedCellsCount();
-295
-296  /**
-297   * Get the total amount of data flushed 
to disk, in bytes.
-298   */
-299  long getFlushedCellsSize();
-300
-301  /**
-302   * Get the total amount of data 
processed during minor compactions, in bytes.
-303   */
-304  long getCompactedCellsSize();
-305
-306  /**
-307   * Get the total amount of data 
processed during major compactions, in bytes.
-308   */
-309  long getMajorCompactedCellsSize();
-310
-311  /**
-312   * Gets the number of cells moved to 
mob during compaction.
-313   */
-314  long getCellsCountCompactedToMob();
-315
-316  /**
-317   * Gets the number of cells moved from 
mob during compaction.
-318   */
-319  long getCellsCountCompactedFromMob();
-320
-321  /**
-322   * Gets the total amount of cells moved 
to mob during compaction, in bytes.
-323   */
-324  long getCellsSizeCompactedToMob();
-325
-326  /**
-327   * Gets the total amount of cells moved 
from mob during 

[27/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
index bf17b37..4af2d9a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CacheConfig.ExternalBlockCaches.html
@@ -555,182 +555,199 @@
 547  // Clear this if in tests you'd make 
more than one block cache instance.
 548  @VisibleForTesting
 549  static BlockCache 
GLOBAL_BLOCK_CACHE_INSTANCE;
-550  private static LruBlockCache 
GLOBAL_L1_CACHE_INSTANCE;
-551
-552  /** Boolean whether we have disabled 
the block cache entirely. */
-553  @VisibleForTesting
-554  static boolean blockCacheDisabled = 
false;
-555
-556  /**
-557   * @param c Configuration to use.
-558   * @return An L1 instance.  Currently 
an instance of LruBlockCache.
-559   */
-560  public static LruBlockCache getL1(final 
Configuration c) {
-561return getL1Internal(c);
-562  }
-563
-564  /**
-565   * @param c Configuration to use.
-566   * @return An L1 instance.  Currently 
an instance of LruBlockCache.
-567   */
-568  private synchronized static 
LruBlockCache getL1Internal(final Configuration c) {
-569if (GLOBAL_L1_CACHE_INSTANCE != null) 
return GLOBAL_L1_CACHE_INSTANCE;
-570final long lruCacheSize = 
MemorySizeUtil.getLruCacheSize(c);
-571if (lruCacheSize  0) {
-572  blockCacheDisabled = true;
-573}
-574if (blockCacheDisabled) return 
null;
-575int blockSize = 
c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);
-576LOG.info("Allocating LruBlockCache 
size=" +
-577  StringUtils.byteDesc(lruCacheSize) 
+ ", blockSize=" + StringUtils.byteDesc(blockSize));
-578GLOBAL_L1_CACHE_INSTANCE = new 
LruBlockCache(lruCacheSize, blockSize, true, c);
-579return GLOBAL_L1_CACHE_INSTANCE;
-580  }
-581
-582  /**
-583   * @param c Configuration to use.
-584   * @return Returns L2 block cache 
instance (for now it is BucketCache BlockCache all the time)
-585   * or null if not supposed to be a 
L2.
-586   */
-587  @VisibleForTesting
-588  static BlockCache getL2(final 
Configuration c) {
-589final boolean useExternal = 
c.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT);
-590if (LOG.isDebugEnabled()) {
-591  LOG.debug("Trying to use " + 
(useExternal?" External":" Internal") + " l2 cache");
-592}
-593
-594// If we want to use an external 
block cache then create that.
-595if (useExternal) {
-596  return getExternalBlockcache(c);
-597}
-598// otherwise use the bucket cache.
-599return getBucketCache(c);
-600  }
-601
-602  private static BlockCache 
getExternalBlockcache(Configuration c) {
-603Class klass = null;
-604
-605// Get the class, from the config. 
s
-606try {
-607  klass = 
ExternalBlockCaches.valueOf(c.get(EXTERNAL_BLOCKCACHE_CLASS_KEY, 
"memcache")).clazz;
-608} catch (IllegalArgumentException 
exception) {
-609  try {
-610klass = 
c.getClass(EXTERNAL_BLOCKCACHE_CLASS_KEY, Class.forName(
-611
"org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache"));
-612  } catch (ClassNotFoundException e) 
{
-613return null;
-614  }
+550  private static LruBlockCache 
GLOBAL_L1_CACHE_INSTANCE = null;
+551  private static BlockCache 
GLOBAL_L2_CACHE_INSTANCE = null;
+552
+553  /** Boolean whether we have disabled 
the block cache entirely. */
+554  @VisibleForTesting
+555  static boolean blockCacheDisabled = 
false;
+556
+557  /**
+558   * @param c Configuration to use.
+559   * @return An L1 instance.  Currently 
an instance of LruBlockCache.
+560   */
+561  public static LruBlockCache getL1(final 
Configuration c) {
+562return getL1Internal(c);
+563  }
+564
+565  public CacheStats getL1Stats() {
+566if (GLOBAL_L1_CACHE_INSTANCE != null) 
{
+567  return 
GLOBAL_L1_CACHE_INSTANCE.getStats();
+568}
+569return null;
+570  }
+571
+572  public CacheStats getL2Stats() {
+573if (GLOBAL_L2_CACHE_INSTANCE != null) 
{
+574  return 
GLOBAL_L2_CACHE_INSTANCE.getStats();
+575}
+576return null;
+577  }
+578
+579  /**
+580   * @param c Configuration to use.
+581   * @return An L1 instance.  Currently 
an instance of LruBlockCache.
+582   */
+583  private synchronized static 
LruBlockCache getL1Internal(final Configuration c) {
+584if (GLOBAL_L1_CACHE_INSTANCE != null) 
return GLOBAL_L1_CACHE_INSTANCE;
+585final long lruCacheSize = 
MemorySizeUtil.getLruCacheSize(c);
+586if (lruCacheSize  0) {
+587  blockCacheDisabled = true;
+588}
+589if (blockCacheDisabled) return 
null;
+590int blockSize = 

[48/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/apidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/org/apache/hadoop/hbase/HConstants.html
index c53f81d..b7f0407 100644
--- a/apidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/org/apache/hadoop/hbase/HConstants.html
@@ -1367,9 +1367,13 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-OLD_SNAPSHOT_DIR_NAME
+NOT_IMPLEMENTED
 
 
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+OLD_SNAPSHOT_DIR_NAME
+
+
 static long
 OLDEST_TIMESTAMP
 Deprecated.
@@ -1378,446 +1382,446 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-
+
 static int
 PRIORITY_UNSET
 QOS attributes: these attributes are used to demarcate RPC 
call processing
  by different set of handlers.
 
 
-
+
 static int
 QOS_THRESHOLD
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 RECOVERED_EDITS_DIR
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_IMPL
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_HANDLER_ABORT_ON_ERROR_PERCENT
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_HANDLER_COUNT
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_IMPL
 Parameter name for what region server implementation to 
use.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_REPLICATION_HANDLER_COUNT
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SPLIT_THREADS_MAX
 The max number of threads used for splitting storefiles in 
parallel during
  the region split process.
 
 
-
+
 static byte[]
 REGIONINFO_QUALIFIER
 The regioninfo column qualifier
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONINFO_QUALIFIER_STR
 The RegionInfo qualifier as a string
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_INFO_PORT
 A configuration key for regionserver info port
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_INFO_PORT_AUTO
 A flag that enables automatic selection of regionserver 
info port
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_METRICS_PERIOD
 The period (in milliseconds) between computing region 
server point in time metrics
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_PORT
 Parameter name for port region server listens on.
 
 
-
+
 static int
 REPLAY_QOS
 
-
+
 static byte[]
 REPLICATION_BARRIER_FAMILY
 The replication barrier family
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REPLICATION_BARRIER_FAMILY_STR
 The replication barrier family as a string
 
 
-
+
 static boolean
 REPLICATION_BULKLOAD_ENABLE_DEFAULT
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REPLICATION_BULKLOAD_ENABLE_KEY
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REPLICATION_CLUSTER_ID
 Replication cluster id of source cluster which uniquely 
identifies itself with peer cluster
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REPLICATION_CODEC_CONF_KEY
 Configuration key 

[12/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
index 13f64df..07b6ae0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.html
@@ -28,1533 +28,1529 @@
 020import java.util.ArrayList;
 021import java.util.Arrays;
 022import java.util.Collection;
-023import java.util.Collections;
-024import java.util.Comparator;
-025import java.util.Deque;
-026import java.util.HashMap;
-027import java.util.HashSet;
-028import java.util.Iterator;
-029import java.util.List;
-030import java.util.Map;
-031import java.util.Map.Entry;
-032import java.util.NavigableMap;
-033import java.util.Random;
-034import java.util.Set;
-035import java.util.TreeMap;
-036import java.util.function.Predicate;
-037import java.util.stream.Collectors;
-038
-039import 
org.apache.commons.lang.NotImplementedException;
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.conf.Configuration;
-043import 
org.apache.hadoop.hbase.ClusterStatus;
-044import 
org.apache.hadoop.hbase.HBaseIOException;
-045import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-046import 
org.apache.hadoop.hbase.HRegionInfo;
-047import 
org.apache.hadoop.hbase.ServerLoad;
-048import 
org.apache.hadoop.hbase.ServerName;
-049import 
org.apache.hadoop.hbase.TableName;
-050import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-051import 
org.apache.hadoop.hbase.master.LoadBalancer;
-052import 
org.apache.hadoop.hbase.master.MasterServices;
-053import 
org.apache.hadoop.hbase.master.RackManager;
-054import 
org.apache.hadoop.hbase.master.RegionPlan;
-055import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
-056import 
org.apache.hadoop.hbase.security.access.AccessControlLists;
-057import 
org.apache.hadoop.util.StringUtils;
-058
-059import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-060import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
-061import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-062import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-063import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-064import 
org.apache.zookeeper.KeeperException;
-065
-066/**
-067 * The base class for load balancers. It 
provides the the functions used to by
-068 * {@link 
org.apache.hadoop.hbase.master.assignment.AssignmentManager} to assign 
regions
-069 * in the edge cases. It doesn't provide 
an implementation of the
-070 * actual balancing algorithm.
-071 *
-072 */
-073public abstract class BaseLoadBalancer 
implements LoadBalancer {
-074  protected static final int 
MIN_SERVER_BALANCE = 2;
-075  private volatile boolean stopped = 
false;
-076
-077  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
-078
-079  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
-080= load - 
load.getNumberOfRegions() == 0;
-081
-082  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-083
-084  private static class DefaultRackManager 
extends RackManager {
-085@Override
-086public String getRack(ServerName 
server) {
-087  return UNKNOWN_RACK;
-088}
-089  }
-090
-091  /**
-092   * The constructor that uses the basic 
MetricsBalancer
-093   */
-094  protected BaseLoadBalancer() {
-095metricsBalancer = new 
MetricsBalancer();
-096  }
-097
-098  /**
-099   * This Constructor accepts an instance 
of MetricsBalancer,
-100   * which will be used instead of 
creating a new one
-101   */
-102  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-103this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-104  }
-105
-106  /**
-107   * An efficient array based 
implementation similar to ClusterState for keeping
-108   * the status of the cluster in terms 
of region assignment and distribution.
-109   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-110   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-111   * class uses mostly indexes and 
arrays.
-112   *
-113   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-114   * topology in terms of server names, 
hostnames and racks.
-115   */
-116  protected static class Cluster {
-117ServerName[] servers;
-118String[] hosts; // ServerName 
uniquely 

[45/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 50136f0..98744c9 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Reactor Dependency Convergence
 
@@ -399,19 +399,92 @@
 org.apache.hbase:hbase-spark-it:jar:3.0.0-SNAPSHOT\-org.apache.spark:spark-core_2.10:jar:1.6.0:provided+-org.json4s:json4s-jackson_2.10:jar:3.2.10:provided|\-org.json4s:json4s-core_2.10:jar:3.2.10:provided|\-(com.thoughtworks.paranamer:paranamer:jar:2.6:provided
 - omitted for conflict with 2.3)\-com.fasterxml.jackson.module:jackson-module-scala_2.10:jar:2.4.4:provided\-(com.thoughtworks.paranamer:paranamer:jar:2.6:provided
 - omitted for conflict with 2.3)
 org.apache.hbase:hbase-spark:jar:3.0.0-SNAPSHOT\-org.apache.spark:spark-core_2.10:jar:1.6.0:provided+-org.json4s:json4s-jackson_2.10:jar:3.2.10:provided|\-org.json4s:json4s-core_2.10:jar:3.2.10:provided|\-(com.thoughtworks.paranamer:paranamer:jar:2.6:provided
 - omitted for duplicate)\-com.fasterxml.jackson.module:jackson-module-scala_2.10:jar:2.4.4:provided\-(com.thoughtworks.paranamer:paranamer:jar:2.6:compile
 - scope updated from provided; omitted for duplicate)
 
-commons-net:commons-net
+commons-lang:commons-lang
 
 
 
 
 
 
+2.4
+
+
+org.apache.hbase:hbase-assembly:pom:3.0.0-SNAPSHOT\-org.apache.hbase:hbase-server:jar:3.0.0-SNAPSHOT:compile\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-commons-configuration:commons-configuration:jar:1.6:compile\-(commons-lang:commons-lang:jar:2.4:compile
 - omitted for conflict with 2.6)
+org.apache.hbase:hbase-backup:jar:3.0.0-SNAPSHOT\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-commons-configuration:commons-configuration:jar:1.6:compile\-(commons-lang:commons-lang:jar:2.4:compile
 - omitted for conflict with 2.6)
+org.apache.hbase:hbase-client-project:jar:3.0.0-SNAPSHOT\-org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-commons-configuration:commons-configuration:jar:1.6:compile\-(commons-lang:commons-lang:jar:2.4:compile
 - omitted for conflict with 2.6)
+org.apache.hbase:hbase-client:jar:3.0.0-SNAPSHOT\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-commons-configuration:commons-configuration:jar:1.6:compile\-(commons-lang:commons-lang:jar:2.4:compile
 - omitted for conflict with 2.6)
+org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-commons-configuration:commons-configuration:jar:1.6:compile\-(commons-lang:commons-lang:jar:2.4:compile
 - omitted for conflict with 2.6)
+org.apache.hbase:hbase-endpoint:jar:3.0.0-SNAPSHOT\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-commons-configuration:commons-configuration:jar:1.6:compile\-(commons-lang:commons-lang:jar:2.4:compile
 - omitted for conflict with 2.6)
+org.apache.hbase:hbase-examples:jar:3.0.0-SNAPSHOT\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-commons-configuration:commons-configuration:jar:1.6:compile\-(commons-lang:commons-lang:jar:2.4:compile
 - omitted for conflict with 2.6)
+org.apache.hbase:hbase-external-blockcache:jar:3.0.0-SNAPSHOT\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-commons-configuration:commons-configuration:jar:1.6:compile\-(commons-lang:commons-lang:jar:2.4:compile
 - omitted for conflict with 2.6)
+org.apache.hbase:hbase-hadoop-compat:jar:3.0.0-SNAPSHOT\-org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT:compile\-org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-commons-configuration:commons-configuration:jar:1.6:compile\-(commons-lang:commons-lang:jar:2.4:compile
 - omitted for conflict with 2.6)
+org.apache.hbase:hbase-hadoop2-compat:jar:3.0.0-SNAPSHOT\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-commons-configuration:commons-configuration:jar:1.6:compile\-(commons-lang:commons-lang:jar:2.4:compile
 - omitted for conflict with 2.6)
+org.apache.hbase:hbase-it:jar:3.0.0-SNAPSHOT\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-commons-configuration:commons-configuration:jar:1.6:compile\-(commons-lang:commons-lang:jar:2.4:compile
 - omitted for conflict with 2.6)
+org.apache.hbase:hbase-mapreduce:jar:3.0.0-SNAPSHOT\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-commons-configuration:commons-configuration:jar:1.6:compile\-(commons-lang:commons-lang:jar:2.4:compile
 - omitted for conflict with 2.6)
+org.apache.hbase:hbase-metrics-api:jar:3.0.0-SNAPSHOT\-org.apache.hbase:hbase-common:jar:3.0.0-SNAPSHOT:compile\-org.apache.hadoop:hadoop-common:jar:2.7.1:compile\-commons-configuration:commons-configuration:jar:1.6:compile\-(commons-lang:commons-lang:jar:2.4:compile
 - omitted for conflict with 2.6)

[20/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
index 8609df1..601ad09 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.BucketEntry.html
@@ -361,1266 +361,1267 @@
 353return this.cacheEnabled;
 354  }
 355
-356  public long getMaxSize() {
-357return this.cacheCapacity;
-358  }
-359
-360  public String getIoEngine() {
-361return ioEngine.toString();
-362  }
-363
-364  /**
-365   * Get the IOEngine from the IO engine 
name
-366   * @param ioEngineName
-367   * @param capacity
-368   * @param persistencePath
-369   * @return the IOEngine
-370   * @throws IOException
-371   */
-372  private IOEngine 
getIOEngineFromName(String ioEngineName, long capacity, String 
persistencePath)
-373  throws IOException {
-374if (ioEngineName.startsWith("file:") 
|| ioEngineName.startsWith("files:")) {
-375  // In order to make the usage 
simple, we only need the prefix 'files:' in
-376  // document whether one or multiple 
file(s), but also support 'file:' for
-377  // the compatibility
-378  String[] filePaths = 
ioEngineName.substring(ioEngineName.indexOf(":") + 1)
-379  
.split(FileIOEngine.FILE_DELIMITER);
-380  return new FileIOEngine(capacity, 
persistencePath != null, filePaths);
-381} else if 
(ioEngineName.startsWith("offheap")) {
-382  return new 
ByteBufferIOEngine(capacity, true);
-383} else if 
(ioEngineName.startsWith("heap")) {
-384  return new 
ByteBufferIOEngine(capacity, false);
-385} else if 
(ioEngineName.startsWith("mmap:")) {
-386  return new 
FileMmapEngine(ioEngineName.substring(5), capacity);
-387} else {
-388  throw new 
IllegalArgumentException(
-389  "Don't understand io engine 
name for cache - prefix with file:, heap or offheap");
-390}
-391  }
-392
-393  /**
-394   * Cache the block with the specified 
name and buffer.
-395   * @param cacheKey block's cache key
-396   * @param buf block buffer
-397   */
-398  @Override
-399  public void cacheBlock(BlockCacheKey 
cacheKey, Cacheable buf) {
-400cacheBlock(cacheKey, buf, false, 
false);
-401  }
-402
-403  /**
-404   * Cache the block with the specified 
name and buffer.
-405   * @param cacheKey block's cache key
-406   * @param cachedItem block buffer
-407   * @param inMemory if block is 
in-memory
-408   * @param cacheDataInL1
-409   */
-410  @Override
-411  public void cacheBlock(BlockCacheKey 
cacheKey, Cacheable cachedItem, boolean inMemory,
-412  final boolean cacheDataInL1) {
-413cacheBlockWithWait(cacheKey, 
cachedItem, inMemory, wait_when_cache);
-414  }
-415
-416  /**
-417   * Cache the block to ramCache
-418   * @param cacheKey block's cache key
-419   * @param cachedItem block buffer
-420   * @param inMemory if block is 
in-memory
-421   * @param wait if true, blocking wait 
when queue is full
-422   */
-423  public void 
cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean 
inMemory,
-424  boolean wait) {
-425if (LOG.isTraceEnabled()) 
LOG.trace("Caching key=" + cacheKey + ", item=" + cachedItem);
-426if (!cacheEnabled) {
-427  return;
-428}
-429
-430if (backingMap.containsKey(cacheKey)) 
{
-431  return;
-432}
-433
-434/*
-435 * Stuff the entry into the RAM cache 
so it can get drained to the persistent store
-436 */
-437RAMQueueEntry re =
-438new RAMQueueEntry(cacheKey, 
cachedItem, accessCount.incrementAndGet(), inMemory);
-439if (ramCache.putIfAbsent(cacheKey, 
re) != null) {
-440  return;
-441}
-442int queueNum = (cacheKey.hashCode() 
 0x7FFF) % writerQueues.size();
-443BlockingQueueRAMQueueEntry bq 
= writerQueues.get(queueNum);
-444boolean successfulAddition = false;
-445if (wait) {
-446  try {
-447successfulAddition = bq.offer(re, 
DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);
-448  } catch (InterruptedException e) 
{
-449
Thread.currentThread().interrupt();
-450  }
-451} else {
-452  successfulAddition = 
bq.offer(re);
-453}
-454if (!successfulAddition) {
-455  ramCache.remove(cacheKey);
-456  cacheStats.failInsert();
-457} else {
-458  
this.blockNumber.incrementAndGet();
-459  
this.heapSize.addAndGet(cachedItem.heapSize());
-460  blocksByHFile.add(cacheKey);
-461}
-462  }
-463
-464  /**
-465   * Get the buffer of the block with the 
specified key.
-466   * @param key block's cache key
-467   * @param caching true if the caller 
caches blocks on 

[14/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.html
index 9a639a1..ecf049a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/NettyRpcClientConfigHelper.html
@@ -33,7 +33,7 @@
 025import java.util.HashMap;
 026import java.util.Map;
 027
-028import 
org.apache.commons.lang.StringUtils;
+028import 
org.apache.commons.lang3.StringUtils;
 029import 
org.apache.hadoop.conf.Configuration;
 030import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 031import 
org.apache.hadoop.hbase.util.Pair;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
index db914b6..875a1b5 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.TableInfo.html
@@ -42,7 +42,7 @@
 034import java.util.function.Function;
 035import java.util.stream.Collectors;
 036
-037import 
org.apache.commons.lang.StringUtils;
+037import 
org.apache.commons.lang3.StringUtils;
 038import org.apache.commons.logging.Log;
 039import 
org.apache.commons.logging.LogFactory;
 040import 
org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
index db914b6..875a1b5 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.WriterLength.html
@@ -42,7 +42,7 @@
 034import java.util.function.Function;
 035import java.util.stream.Collectors;
 036
-037import 
org.apache.commons.lang.StringUtils;
+037import 
org.apache.commons.lang3.StringUtils;
 038import org.apache.commons.logging.Log;
 039import 
org.apache.commons.logging.LogFactory;
 040import 
org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
index db914b6..875a1b5 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.html
@@ -42,7 +42,7 @@
 034import java.util.function.Function;
 035import java.util.stream.Collectors;
 036
-037import 
org.apache.commons.lang.StringUtils;
+037import 
org.apache.commons.lang3.StringUtils;
 038import org.apache.commons.logging.Log;
 039import 
org.apache.commons.logging.LogFactory;
 040import 
org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.TsvParser.BadTsvLineException.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.TsvParser.BadTsvLineException.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.TsvParser.BadTsvLineException.html
index d9410d1..361ad39 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.TsvParser.BadTsvLineException.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/mapreduce/ImportTsv.TsvParser.BadTsvLineException.html
@@ -34,7 +34,7 @@
 026import java.util.HashSet;
 027import java.util.Set;
 028
-029import 
org.apache.commons.lang.StringUtils;
+029import 
org.apache.commons.lang3.StringUtils;
 030import org.apache.commons.logging.Log;
 031import 
org.apache.commons.logging.LogFactory;
 032import 
org.apache.hadoop.conf.Configuration;


[43/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/index-all.html
--
diff --git a/devapidocs/index-all.html b/devapidocs/index-all.html
index 0c63f2f..5c28fda 100644
--- a/devapidocs/index-all.html
+++ b/devapidocs/index-all.html
@@ -38783,12 +38783,64 @@
 
 getL1(Configuration)
 - Static method in class org.apache.hadoop.hbase.io.hfile.CacheConfig
 
+getL1CacheHitCount()
 - Method in interface org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapper
+
+Hit count of L1 cache.
+
+getL1CacheHitCount()
 - Method in class org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapperImpl
+
+getL1CacheHitRatio()
 - Method in interface org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapper
+
+Hit ratio of L1 cache.
+
+getL1CacheHitRatio()
 - Method in class org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapperImpl
+
+getL1CacheMissCount()
 - Method in interface org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapper
+
+Miss count of L1 cache.
+
+getL1CacheMissCount()
 - Method in class org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapperImpl
+
+getL1CacheMissRatio()
 - Method in interface org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapper
+
+Miss ratio of L1 cache.
+
+getL1CacheMissRatio()
 - Method in class org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapperImpl
+
 getL1Internal(Configuration)
 - Static method in class org.apache.hadoop.hbase.io.hfile.CacheConfig
 
+getL1Stats()
 - Method in class org.apache.hadoop.hbase.io.hfile.CacheConfig
+
 getL2(Configuration)
 - Static method in class org.apache.hadoop.hbase.io.hfile.CacheConfig
 
 getL2BlockCacheHeapPercent(Configuration)
 - Static method in class org.apache.hadoop.hbase.io.util.MemorySizeUtil
 
+getL2CacheHitCount()
 - Method in interface org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapper
+
+Hit count of L2 cache.
+
+getL2CacheHitCount()
 - Method in class org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapperImpl
+
+getL2CacheHitRatio()
 - Method in interface org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapper
+
+Hit ratio of L2 cache.
+
+getL2CacheHitRatio()
 - Method in class org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapperImpl
+
+getL2CacheMissCount()
 - Method in interface org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapper
+
+Miss count of L2 cache.
+
+getL2CacheMissCount()
 - Method in class org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapperImpl
+
+getL2CacheMissRatio()
 - Method in interface org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapper
+
+Miss ratio of L2 cache.
+
+getL2CacheMissRatio()
 - Method in class org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapperImpl
+
+getL2Stats()
 - Method in class org.apache.hadoop.hbase.io.hfile.CacheConfig
+
 getLabel(int)
 - Method in interface org.apache.hadoop.hbase.security.visibility.VisibilityLabelOrdinalProvider
 
 Returns the string associated with the ordinal.
@@ -39946,12 +39998,20 @@
 Return the highest sequence ID found across all storefiles 
in
  the given list.
 
+getMaxSize()
 - Method in interface org.apache.hadoop.hbase.io.hfile.BlockCache
+
+Returns the Max size of the block cache, in bytes.
+
 getMaxSize()
 - Method in class org.apache.hadoop.hbase.io.hfile.bucket.BucketCache
 
+getMaxSize()
 - Method in class org.apache.hadoop.hbase.io.hfile.CombinedBlockCache
+
 getMaxSize()
 - Method in class org.apache.hadoop.hbase.io.hfile.LruBlockCache
 
 Get the maximum size of this cache.
 
+getMaxSize()
 - Method in class org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache
+
 getMaxSize()
 - Method in class org.apache.hadoop.hbase.io.hfile.MemcachedBlockCache.HFileBlockTranscoder
 
 getMaxSleepTime()
 - Method in class org.apache.hadoop.hbase.util.RetryCounter.RetryConfig
@@ -40114,6 +40174,10 @@
 
 getMemStoreLAB()
 - Method in class org.apache.hadoop.hbase.regionserver.Segment
 
+getMemstoreLimit()
 - Method in interface org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapper
+
+getMemstoreLimit()
 - Method in class org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapperImpl
+
 getMemstoreLoad()
 - Method in class org.apache.hadoop.hbase.client.RegionLoadStats
 
 getMemstoreLoadPercent()
 - Method in class org.apache.hadoop.hbase.client.backoff.ServerStatistics.RegionStatistics
@@ -50877,6 +50941,8 @@
 
 GLOBAL_L1_CACHE_INSTANCE
 - Static variable in class org.apache.hadoop.hbase.io.hfile.CacheConfig
 
+GLOBAL_L2_CACHE_INSTANCE
 - Static variable in class org.apache.hadoop.hbase.io.hfile.CacheConfig
+
 globalCache
 - Variable in class org.apache.hadoop.hbase.security.access.TableAuthManager
 
 Cache of global permissions
@@ -51415,7 +51481,7 @@
 If multiple clients with the same principal try to connect 
to the same server at the same time,
  the server assumes a replay attack is in progress.
 

[10/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
index 13f64df..07b6ae0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
@@ -28,1533 +28,1529 @@
 020import java.util.ArrayList;
 021import java.util.Arrays;
 022import java.util.Collection;
-023import java.util.Collections;
-024import java.util.Comparator;
-025import java.util.Deque;
-026import java.util.HashMap;
-027import java.util.HashSet;
-028import java.util.Iterator;
-029import java.util.List;
-030import java.util.Map;
-031import java.util.Map.Entry;
-032import java.util.NavigableMap;
-033import java.util.Random;
-034import java.util.Set;
-035import java.util.TreeMap;
-036import java.util.function.Predicate;
-037import java.util.stream.Collectors;
-038
-039import 
org.apache.commons.lang.NotImplementedException;
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.conf.Configuration;
-043import 
org.apache.hadoop.hbase.ClusterStatus;
-044import 
org.apache.hadoop.hbase.HBaseIOException;
-045import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-046import 
org.apache.hadoop.hbase.HRegionInfo;
-047import 
org.apache.hadoop.hbase.ServerLoad;
-048import 
org.apache.hadoop.hbase.ServerName;
-049import 
org.apache.hadoop.hbase.TableName;
-050import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-051import 
org.apache.hadoop.hbase.master.LoadBalancer;
-052import 
org.apache.hadoop.hbase.master.MasterServices;
-053import 
org.apache.hadoop.hbase.master.RackManager;
-054import 
org.apache.hadoop.hbase.master.RegionPlan;
-055import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
-056import 
org.apache.hadoop.hbase.security.access.AccessControlLists;
-057import 
org.apache.hadoop.util.StringUtils;
-058
-059import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-060import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
-061import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-062import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-063import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-064import 
org.apache.zookeeper.KeeperException;
-065
-066/**
-067 * The base class for load balancers. It 
provides the the functions used to by
-068 * {@link 
org.apache.hadoop.hbase.master.assignment.AssignmentManager} to assign 
regions
-069 * in the edge cases. It doesn't provide 
an implementation of the
-070 * actual balancing algorithm.
-071 *
-072 */
-073public abstract class BaseLoadBalancer 
implements LoadBalancer {
-074  protected static final int 
MIN_SERVER_BALANCE = 2;
-075  private volatile boolean stopped = 
false;
-076
-077  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
-078
-079  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
-080= load - 
load.getNumberOfRegions() == 0;
-081
-082  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-083
-084  private static class DefaultRackManager 
extends RackManager {
-085@Override
-086public String getRack(ServerName 
server) {
-087  return UNKNOWN_RACK;
-088}
-089  }
-090
-091  /**
-092   * The constructor that uses the basic 
MetricsBalancer
-093   */
-094  protected BaseLoadBalancer() {
-095metricsBalancer = new 
MetricsBalancer();
-096  }
-097
-098  /**
-099   * This Constructor accepts an instance 
of MetricsBalancer,
-100   * which will be used instead of 
creating a new one
-101   */
-102  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-103this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-104  }
-105
-106  /**
-107   * An efficient array based 
implementation similar to ClusterState for keeping
-108   * the status of the cluster in terms 
of region assignment and distribution.
-109   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-110   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-111   * class uses mostly indexes and 
arrays.
-112   *
-113   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-114   * topology in terms of server names, 
hostnames and racks.
-115   */
-116  protected static class Cluster {
-117ServerName[] servers;
-118String[] 

[07/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
index 13f64df..07b6ae0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.html
@@ -28,1533 +28,1529 @@
 020import java.util.ArrayList;
 021import java.util.Arrays;
 022import java.util.Collection;
-023import java.util.Collections;
-024import java.util.Comparator;
-025import java.util.Deque;
-026import java.util.HashMap;
-027import java.util.HashSet;
-028import java.util.Iterator;
-029import java.util.List;
-030import java.util.Map;
-031import java.util.Map.Entry;
-032import java.util.NavigableMap;
-033import java.util.Random;
-034import java.util.Set;
-035import java.util.TreeMap;
-036import java.util.function.Predicate;
-037import java.util.stream.Collectors;
-038
-039import 
org.apache.commons.lang.NotImplementedException;
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.conf.Configuration;
-043import 
org.apache.hadoop.hbase.ClusterStatus;
-044import 
org.apache.hadoop.hbase.HBaseIOException;
-045import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-046import 
org.apache.hadoop.hbase.HRegionInfo;
-047import 
org.apache.hadoop.hbase.ServerLoad;
-048import 
org.apache.hadoop.hbase.ServerName;
-049import 
org.apache.hadoop.hbase.TableName;
-050import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-051import 
org.apache.hadoop.hbase.master.LoadBalancer;
-052import 
org.apache.hadoop.hbase.master.MasterServices;
-053import 
org.apache.hadoop.hbase.master.RackManager;
-054import 
org.apache.hadoop.hbase.master.RegionPlan;
-055import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
-056import 
org.apache.hadoop.hbase.security.access.AccessControlLists;
-057import 
org.apache.hadoop.util.StringUtils;
-058
-059import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-060import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
-061import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-062import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-063import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-064import 
org.apache.zookeeper.KeeperException;
-065
-066/**
-067 * The base class for load balancers. It 
provides the the functions used to by
-068 * {@link 
org.apache.hadoop.hbase.master.assignment.AssignmentManager} to assign 
regions
-069 * in the edge cases. It doesn't provide 
an implementation of the
-070 * actual balancing algorithm.
-071 *
-072 */
-073public abstract class BaseLoadBalancer 
implements LoadBalancer {
-074  protected static final int 
MIN_SERVER_BALANCE = 2;
-075  private volatile boolean stopped = 
false;
-076
-077  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
-078
-079  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
-080= load - 
load.getNumberOfRegions() == 0;
-081
-082  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-083
-084  private static class DefaultRackManager 
extends RackManager {
-085@Override
-086public String getRack(ServerName 
server) {
-087  return UNKNOWN_RACK;
-088}
-089  }
-090
-091  /**
-092   * The constructor that uses the basic 
MetricsBalancer
-093   */
-094  protected BaseLoadBalancer() {
-095metricsBalancer = new 
MetricsBalancer();
-096  }
-097
-098  /**
-099   * This Constructor accepts an instance 
of MetricsBalancer,
-100   * which will be used instead of 
creating a new one
-101   */
-102  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-103this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-104  }
-105
-106  /**
-107   * An efficient array based 
implementation similar to ClusterState for keeping
-108   * the status of the cluster in terms 
of region assignment and distribution.
-109   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-110   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-111   * class uses mostly indexes and 
arrays.
-112   *
-113   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-114   * topology in terms of server names, 
hostnames and racks.
-115   */
-116  protected static class Cluster {
-117ServerName[] servers;
-118String[] hosts; // ServerName 
uniquely identifies a region server. 

[30/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/regionserver/MutableSegment.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MutableSegment.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MutableSegment.html
index fb3286e..51d2b18 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/MutableSegment.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/MutableSegment.html
@@ -207,7 +207,7 @@ extends 
 boolean
-shouldSeek(Scanscan,
+shouldSeek(TimeRangetr,
   longoldestUnexpiredTS)
 
 
@@ -324,17 +324,17 @@ extends 
+
 
 
 
 
 shouldSeek
-publicbooleanshouldSeek(Scanscan,
+publicbooleanshouldSeek(TimeRangetr,
   longoldestUnexpiredTS)
 
 Specified by:
-shouldSeekin
 classSegment
+shouldSeekin
 classSegment
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
index 46613ec..1ef4fa4 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
@@ -799,7 +799,7 @@ implements 
 private void
-scan(HBaseRpcControllercontroller,
+scan(HBaseRpcControllercontroller,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanRequestrequest,
 RSRpcServices.RegionScannerHolderrsh,
 longmaxQuotaResultSize,
@@ -807,7 +807,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListResultresults,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.Builderbuilder,
-org.apache.commons.lang.mutable.MutableObjectlastBlock,
+org.apache.commons.lang3.mutable.MutableObjectlastBlock,
 RpcCallContextcontext)
 
 
@@ -2433,7 +2433,7 @@ private static finalhttp://docs.oracle.com/javase/8/docs/api/java
   
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.Builderbuilder)
 
 
-
+
 
 
 
@@ -2447,7 +2447,7 @@ private static finalhttp://docs.oracle.com/javase/8/docs/api/java
   intlimitOfRows,
   http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListResultresults,
   
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ScanResponse.Builderbuilder,
-  org.apache.commons.lang.mutable.MutableObjectlastBlock,
+  
org.apache.commons.lang3.mutable.MutableObjectlastBlock,
   RpcCallContextcontext)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.ReversedKVScannerComparator.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.ReversedKVScannerComparator.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.ReversedKVScannerComparator.html
index f2663e1..7a310c1 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.ReversedKVScannerComparator.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.ReversedKVScannerComparator.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class ReversedKeyValueHeap.ReversedKVScannerComparator
+private static class ReversedKeyValueHeap.ReversedKVScannerComparator
 extends KeyValueHeap.KVScannerComparator
 In ReversedKVScannerComparator, we compare the row of 
scanners' peek values
  first, sort bigger one before the smaller one. Then compare the KeyValue if
@@ -233,7 +233,7 @@ extends 
 
 ReversedKVScannerComparator
-publicReversedKVScannerComparator(CellComparatorkvComparator)
+publicReversedKVScannerComparator(CellComparatorkvComparator)
 Constructor
 
 Parameters:
@@ -255,7 +255,7 @@ extends 
 
 compare
-publicintcompare(KeyValueScannerleft,
+publicintcompare(KeyValueScannerleft,
KeyValueScannerright)
 
 Specified by:
@@ -271,7 +271,7 @@ extends 
 
 compareRows
-publicintcompareRows(Cellleft,
+publicintcompareRows(Cellleft,
Cellright)
 Compares rows of two KeyValue
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/regionserver/ReversedKeyValueHeap.html

[17/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
index 8609df1..601ad09 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
@@ -361,1266 +361,1267 @@
 353return this.cacheEnabled;
 354  }
 355
-356  public long getMaxSize() {
-357return this.cacheCapacity;
-358  }
-359
-360  public String getIoEngine() {
-361return ioEngine.toString();
-362  }
-363
-364  /**
-365   * Get the IOEngine from the IO engine 
name
-366   * @param ioEngineName
-367   * @param capacity
-368   * @param persistencePath
-369   * @return the IOEngine
-370   * @throws IOException
-371   */
-372  private IOEngine 
getIOEngineFromName(String ioEngineName, long capacity, String 
persistencePath)
-373  throws IOException {
-374if (ioEngineName.startsWith("file:") 
|| ioEngineName.startsWith("files:")) {
-375  // In order to make the usage 
simple, we only need the prefix 'files:' in
-376  // document whether one or multiple 
file(s), but also support 'file:' for
-377  // the compatibility
-378  String[] filePaths = 
ioEngineName.substring(ioEngineName.indexOf(":") + 1)
-379  
.split(FileIOEngine.FILE_DELIMITER);
-380  return new FileIOEngine(capacity, 
persistencePath != null, filePaths);
-381} else if 
(ioEngineName.startsWith("offheap")) {
-382  return new 
ByteBufferIOEngine(capacity, true);
-383} else if 
(ioEngineName.startsWith("heap")) {
-384  return new 
ByteBufferIOEngine(capacity, false);
-385} else if 
(ioEngineName.startsWith("mmap:")) {
-386  return new 
FileMmapEngine(ioEngineName.substring(5), capacity);
-387} else {
-388  throw new 
IllegalArgumentException(
-389  "Don't understand io engine 
name for cache - prefix with file:, heap or offheap");
-390}
-391  }
-392
-393  /**
-394   * Cache the block with the specified 
name and buffer.
-395   * @param cacheKey block's cache key
-396   * @param buf block buffer
-397   */
-398  @Override
-399  public void cacheBlock(BlockCacheKey 
cacheKey, Cacheable buf) {
-400cacheBlock(cacheKey, buf, false, 
false);
-401  }
-402
-403  /**
-404   * Cache the block with the specified 
name and buffer.
-405   * @param cacheKey block's cache key
-406   * @param cachedItem block buffer
-407   * @param inMemory if block is 
in-memory
-408   * @param cacheDataInL1
-409   */
-410  @Override
-411  public void cacheBlock(BlockCacheKey 
cacheKey, Cacheable cachedItem, boolean inMemory,
-412  final boolean cacheDataInL1) {
-413cacheBlockWithWait(cacheKey, 
cachedItem, inMemory, wait_when_cache);
-414  }
-415
-416  /**
-417   * Cache the block to ramCache
-418   * @param cacheKey block's cache key
-419   * @param cachedItem block buffer
-420   * @param inMemory if block is 
in-memory
-421   * @param wait if true, blocking wait 
when queue is full
-422   */
-423  public void 
cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean 
inMemory,
-424  boolean wait) {
-425if (LOG.isTraceEnabled()) 
LOG.trace("Caching key=" + cacheKey + ", item=" + cachedItem);
-426if (!cacheEnabled) {
-427  return;
-428}
-429
-430if (backingMap.containsKey(cacheKey)) 
{
-431  return;
-432}
-433
-434/*
-435 * Stuff the entry into the RAM cache 
so it can get drained to the persistent store
-436 */
-437RAMQueueEntry re =
-438new RAMQueueEntry(cacheKey, 
cachedItem, accessCount.incrementAndGet(), inMemory);
-439if (ramCache.putIfAbsent(cacheKey, 
re) != null) {
-440  return;
-441}
-442int queueNum = (cacheKey.hashCode() 
 0x7FFF) % writerQueues.size();
-443BlockingQueueRAMQueueEntry bq 
= writerQueues.get(queueNum);
-444boolean successfulAddition = false;
-445if (wait) {
-446  try {
-447successfulAddition = bq.offer(re, 
DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);
-448  } catch (InterruptedException e) 
{
-449
Thread.currentThread().interrupt();
-450  }
-451} else {
-452  successfulAddition = 
bq.offer(re);
-453}
-454if (!successfulAddition) {
-455  ramCache.remove(cacheKey);
-456  cacheStats.failInsert();
-457} else {
-458  
this.blockNumber.incrementAndGet();
-459  
this.heapSize.addAndGet(cachedItem.heapSize());
-460  blocksByHFile.add(cacheKey);
-461}
-462  }
-463
-464  /**
-465   * Get the buffer of the block with the 
specified key.
-466   * @param key block's cache key
-467   * @param caching true if the 

[06/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
index 13f64df..07b6ae0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.DefaultRackManager.html
@@ -28,1533 +28,1529 @@
 020import java.util.ArrayList;
 021import java.util.Arrays;
 022import java.util.Collection;
-023import java.util.Collections;
-024import java.util.Comparator;
-025import java.util.Deque;
-026import java.util.HashMap;
-027import java.util.HashSet;
-028import java.util.Iterator;
-029import java.util.List;
-030import java.util.Map;
-031import java.util.Map.Entry;
-032import java.util.NavigableMap;
-033import java.util.Random;
-034import java.util.Set;
-035import java.util.TreeMap;
-036import java.util.function.Predicate;
-037import java.util.stream.Collectors;
-038
-039import 
org.apache.commons.lang.NotImplementedException;
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.conf.Configuration;
-043import 
org.apache.hadoop.hbase.ClusterStatus;
-044import 
org.apache.hadoop.hbase.HBaseIOException;
-045import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-046import 
org.apache.hadoop.hbase.HRegionInfo;
-047import 
org.apache.hadoop.hbase.ServerLoad;
-048import 
org.apache.hadoop.hbase.ServerName;
-049import 
org.apache.hadoop.hbase.TableName;
-050import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-051import 
org.apache.hadoop.hbase.master.LoadBalancer;
-052import 
org.apache.hadoop.hbase.master.MasterServices;
-053import 
org.apache.hadoop.hbase.master.RackManager;
-054import 
org.apache.hadoop.hbase.master.RegionPlan;
-055import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
-056import 
org.apache.hadoop.hbase.security.access.AccessControlLists;
-057import 
org.apache.hadoop.util.StringUtils;
-058
-059import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-060import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
-061import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-062import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-063import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-064import 
org.apache.zookeeper.KeeperException;
-065
-066/**
-067 * The base class for load balancers. It 
provides the the functions used to by
-068 * {@link 
org.apache.hadoop.hbase.master.assignment.AssignmentManager} to assign 
regions
-069 * in the edge cases. It doesn't provide 
an implementation of the
-070 * actual balancing algorithm.
-071 *
-072 */
-073public abstract class BaseLoadBalancer 
implements LoadBalancer {
-074  protected static final int 
MIN_SERVER_BALANCE = 2;
-075  private volatile boolean stopped = 
false;
-076
-077  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
-078
-079  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
-080= load - 
load.getNumberOfRegions() == 0;
-081
-082  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-083
-084  private static class DefaultRackManager 
extends RackManager {
-085@Override
-086public String getRack(ServerName 
server) {
-087  return UNKNOWN_RACK;
-088}
-089  }
-090
-091  /**
-092   * The constructor that uses the basic 
MetricsBalancer
-093   */
-094  protected BaseLoadBalancer() {
-095metricsBalancer = new 
MetricsBalancer();
-096  }
-097
-098  /**
-099   * This Constructor accepts an instance 
of MetricsBalancer,
-100   * which will be used instead of 
creating a new one
-101   */
-102  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-103this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-104  }
-105
-106  /**
-107   * An efficient array based 
implementation similar to ClusterState for keeping
-108   * the status of the cluster in terms 
of region assignment and distribution.
-109   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-110   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-111   * class uses mostly indexes and 
arrays.
-112   *
-113   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-114   * topology in terms of server names, 
hostnames and racks.
-115   */
-116  protected static class Cluster {
-117ServerName[] servers;
-118String[] hosts; // 

[13/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
index 13f64df..07b6ae0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.Action.Type.html
@@ -28,1533 +28,1529 @@
 020import java.util.ArrayList;
 021import java.util.Arrays;
 022import java.util.Collection;
-023import java.util.Collections;
-024import java.util.Comparator;
-025import java.util.Deque;
-026import java.util.HashMap;
-027import java.util.HashSet;
-028import java.util.Iterator;
-029import java.util.List;
-030import java.util.Map;
-031import java.util.Map.Entry;
-032import java.util.NavigableMap;
-033import java.util.Random;
-034import java.util.Set;
-035import java.util.TreeMap;
-036import java.util.function.Predicate;
-037import java.util.stream.Collectors;
-038
-039import 
org.apache.commons.lang.NotImplementedException;
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.conf.Configuration;
-043import 
org.apache.hadoop.hbase.ClusterStatus;
-044import 
org.apache.hadoop.hbase.HBaseIOException;
-045import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-046import 
org.apache.hadoop.hbase.HRegionInfo;
-047import 
org.apache.hadoop.hbase.ServerLoad;
-048import 
org.apache.hadoop.hbase.ServerName;
-049import 
org.apache.hadoop.hbase.TableName;
-050import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-051import 
org.apache.hadoop.hbase.master.LoadBalancer;
-052import 
org.apache.hadoop.hbase.master.MasterServices;
-053import 
org.apache.hadoop.hbase.master.RackManager;
-054import 
org.apache.hadoop.hbase.master.RegionPlan;
-055import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
-056import 
org.apache.hadoop.hbase.security.access.AccessControlLists;
-057import 
org.apache.hadoop.util.StringUtils;
-058
-059import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-060import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
-061import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-062import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-063import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-064import 
org.apache.zookeeper.KeeperException;
-065
-066/**
-067 * The base class for load balancers. It 
provides the the functions used to by
-068 * {@link 
org.apache.hadoop.hbase.master.assignment.AssignmentManager} to assign 
regions
-069 * in the edge cases. It doesn't provide 
an implementation of the
-070 * actual balancing algorithm.
-071 *
-072 */
-073public abstract class BaseLoadBalancer 
implements LoadBalancer {
-074  protected static final int 
MIN_SERVER_BALANCE = 2;
-075  private volatile boolean stopped = 
false;
-076
-077  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
-078
-079  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
-080= load - 
load.getNumberOfRegions() == 0;
-081
-082  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-083
-084  private static class DefaultRackManager 
extends RackManager {
-085@Override
-086public String getRack(ServerName 
server) {
-087  return UNKNOWN_RACK;
-088}
-089  }
-090
-091  /**
-092   * The constructor that uses the basic 
MetricsBalancer
-093   */
-094  protected BaseLoadBalancer() {
-095metricsBalancer = new 
MetricsBalancer();
-096  }
-097
-098  /**
-099   * This Constructor accepts an instance 
of MetricsBalancer,
-100   * which will be used instead of 
creating a new one
-101   */
-102  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-103this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-104  }
-105
-106  /**
-107   * An efficient array based 
implementation similar to ClusterState for keeping
-108   * the status of the cluster in terms 
of region assignment and distribution.
-109   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-110   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-111   * class uses mostly indexes and 
arrays.
-112   *
-113   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-114   * topology in terms of server names, 
hostnames and racks.
-115   */
-116  protected static class Cluster {
-117ServerName[] servers;
-118String[] hosts; 

[21/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.html
index 9f8e960..6bc0579 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/MemcachedBlockCache.html
@@ -207,96 +207,101 @@
 199  }
 200
 201  @Override
-202  public long getFreeSize() {
+202  public long getMaxSize() {
 203return 0;
 204  }
 205
 206  @Override
-207  public long getCurrentSize() {
+207  public long getFreeSize() {
 208return 0;
 209  }
 210
 211  @Override
-212  public long getCurrentDataSize() {
+212  public long getCurrentSize() {
 213return 0;
 214  }
 215
 216  @Override
-217  public long getBlockCount() {
+217  public long getCurrentDataSize() {
 218return 0;
 219  }
 220
 221  @Override
-222  public long getDataBlockCount() {
+222  public long getBlockCount() {
 223return 0;
 224  }
 225
 226  @Override
-227  public IteratorCachedBlock 
iterator() {
-228return new 
IteratorCachedBlock() {
-229  @Override
-230  public boolean hasNext() {
-231return false;
-232  }
-233
+227  public long getDataBlockCount() {
+228return 0;
+229  }
+230
+231  @Override
+232  public IteratorCachedBlock 
iterator() {
+233return new 
IteratorCachedBlock() {
 234  @Override
-235  public CachedBlock next() {
-236throw new 
NoSuchElementException("MemcachedBlockCache can't iterate over blocks.");
+235  public boolean hasNext() {
+236return false;
 237  }
 238
 239  @Override
-240  public void remove() {
-241
+240  public CachedBlock next() {
+241throw new 
NoSuchElementException("MemcachedBlockCache can't iterate over blocks.");
 242  }
-243};
-244  }
-245
-246  @Override
-247  public BlockCache[] getBlockCaches() 
{
-248return null;
+243
+244  @Override
+245  public void remove() {
+246
+247  }
+248};
 249  }
 250
-251  /**
-252   * Class to encode and decode an 
HFileBlock to and from memecached's resulting byte arrays.
-253   */
-254  private static class 
HFileBlockTranscoder implements TranscoderHFileBlock {
+251  @Override
+252  public BlockCache[] getBlockCaches() 
{
+253return null;
+254  }
 255
-256@Override
-257public boolean asyncDecode(CachedData 
d) {
-258  return false;
-259}
+256  /**
+257   * Class to encode and decode an 
HFileBlock to and from memecached's resulting byte arrays.
+258   */
+259  private static class 
HFileBlockTranscoder implements TranscoderHFileBlock {
 260
 261@Override
-262public CachedData encode(HFileBlock 
block) {
-263  ByteBuffer bb = 
ByteBuffer.allocate(block.getSerializedLength());
-264  block.serialize(bb);
-265  return new CachedData(0, 
bb.array(), CachedData.MAX_SIZE);
-266}
-267
-268@Override
-269public HFileBlock decode(CachedData 
d) {
-270  try {
-271ByteBuff buf = new 
SingleByteBuff(ByteBuffer.wrap(d.getData()));
-272return (HFileBlock) 
HFileBlock.BLOCK_DESERIALIZER.deserialize(buf, true,
-273  MemoryType.EXCLUSIVE);
-274  } catch (IOException e) {
-275LOG.warn("Error deserializing 
data from memcached",e);
-276  }
-277  return null;
-278}
-279
-280@Override
-281public int getMaxSize() {
-282  return MAX_SIZE;
+262public boolean asyncDecode(CachedData 
d) {
+263  return false;
+264}
+265
+266@Override
+267public CachedData encode(HFileBlock 
block) {
+268  ByteBuffer bb = 
ByteBuffer.allocate(block.getSerializedLength());
+269  block.serialize(bb);
+270  return new CachedData(0, 
bb.array(), CachedData.MAX_SIZE);
+271}
+272
+273@Override
+274public HFileBlock decode(CachedData 
d) {
+275  try {
+276ByteBuff buf = new 
SingleByteBuff(ByteBuffer.wrap(d.getData()));
+277return (HFileBlock) 
HFileBlock.BLOCK_DESERIALIZER.deserialize(buf, true,
+278  MemoryType.EXCLUSIVE);
+279  } catch (IOException e) {
+280LOG.warn("Error deserializing 
data from memcached",e);
+281  }
+282  return null;
 283}
-284  }
-285
-286  @Override
-287  public void returnBlock(BlockCacheKey 
cacheKey, Cacheable block) {
-288// Not doing reference counting. All 
blocks here are EXCLUSIVE
+284
+285@Override
+286public int getMaxSize() {
+287  return MAX_SIZE;
+288}
 289  }
 290
-291}
+291  @Override
+292  public void returnBlock(BlockCacheKey 
cacheKey, Cacheable block) {
+293// Not doing reference counting. All 
blocks here are EXCLUSIVE
+294  }
+295
+296}
 
 
 



[47/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index ec9d416..e77cb19 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -289,7 +289,7 @@
 2038
 0
 0
-12894
+12891
 
 Files
 
@@ -2432,7 +2432,7 @@
 org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
 0
 0
-41
+43
 
 org/apache/hadoop/hbase/io/hfile/bucket/ByteBufferIOEngine.java
 0
@@ -3327,7 +3327,7 @@
 org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.java
 0
 0
-67
+61
 
 org/apache/hadoop/hbase/master/balancer/ClusterLoadState.java
 0
@@ -4307,7 +4307,7 @@
 org/apache/hadoop/hbase/regionserver/HRegionServer.java
 0
 0
-130
+131
 
 org/apache/hadoop/hbase/regionserver/HRegionServerCommandLine.java
 0
@@ -4452,7 +4452,7 @@
 org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.java
 0
 0
-1
+3
 
 org/apache/hadoop/hbase/regionserver/MetricsRegionWrapper.java
 0
@@ -4652,7 +4652,7 @@
 org/apache/hadoop/hbase/regionserver/SegmentScanner.java
 0
 0
-1
+2
 
 org/apache/hadoop/hbase/regionserver/ServerNonceManager.java
 0
@@ -5177,7 +5177,7 @@
 org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.java
 0
 0
-8
+5
 
 org/apache/hadoop/hbase/replication/WALCellFilter.java
 0
@@ -7123,7 +7123,7 @@
 http://checkstyle.sourceforge.net/config_imports.html#UnusedImports;>UnusedImports
 
 processJavadoc: true
-122
+114
 Error
 
 indentation
@@ -7134,19 +7134,19 @@
 caseIndent: 2
 basicOffset: 2
 lineWrappingIndentation: 2
-3801
+3802
 Error
 
 javadoc
 http://checkstyle.sourceforge.net/config_javadoc.html#JavadocTagContinuationIndentation;>JavadocTagContinuationIndentation
 
 offset: 2
-761
+766
 Error
 
 
 http://checkstyle.sourceforge.net/config_javadoc.html#NonEmptyAtclauseDescription;>NonEmptyAtclauseDescription
-3235
+3230
 Error
 
 misc
@@ -7164,7 +7164,7 @@
 
 max: 100
 ignorePattern: ^package.*|^import.*|a 
href|href|http://|https://|ftp://|org.apache.thrift.|com.google.protobuf.|hbase.protobuf.generated
-1044
+1047
 Error
 
 
@@ -7174,7 +7174,7 @@
 
 whitespace
 http://checkstyle.sourceforge.net/config_whitespace.html#FileTabCharacter;>FileTabCharacter
-5
+6
 Error
 
 
@@ -13110,7 +13110,7 @@
 
 Error
 javadoc
-NonEmptyAtclauseDescription
+JavadocTagContinuationIndentation
 Javadoc comment at column 43 has parse error. Missed HTML close tag 
'TableName'. Sometimes it means that close tag missed for one of previous 
tags.
 179
 
@@ -16962,7 +16962,7 @@
 
 Error
 javadoc
-NonEmptyAtclauseDescription
+JavadocTagContinuationIndentation
 Javadoc comment at column 64 has parse error. Missed HTML close tag 
'code'. Sometimes it means that close tag missed for one of previous tags.
 2084
 
@@ -22611,7 +22611,7 @@
 
 Error
 javadoc
-NonEmptyAtclauseDescription
+JavadocTagContinuationIndentation
 Javadoc comment at column 37 has parse error. Details: no viable 
alternative at input 'ColumnFamily,' while parsing HTML_ELEMENT
 29
 
@@ -32507,7 +32507,7 @@
 indentation
 Indentation
 'method def modifier' have incorrect indentation level 1, expected level 
should be 2.
-121
+127
 
 org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java
 
@@ -32750,49 +32750,49 @@
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-569
+584
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-574
+589
 
 Error
 javadoc
 JavadocTagContinuationIndentation
 Line continuation have incorrect indentation level, expected level should 
be 2.
-585
+600
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-631
+648
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-689
+706
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-690
+707
 
 Error
 sizes
 LineLength
 Line is longer than 100 characters (found 103).
-692
+709
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-693
+710
 
 org/apache/hadoop/hbase/io/hfile/CacheStats.java
 
@@ -34727,25 +34727,25 @@
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-913
+915
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1060
+1062
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1062
+1064
 
 Error
 blocks
 NeedBraces
 'if' construct must use '{}'s.
-1195
+1197
 
 org/apache/hadoop/hbase/io/hfile/LruCachedBlock.java
 
@@ -35174,157 +35174,169 @@
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-366
+367
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-367
+368
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-368
+369
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-370
+371
 
 Error
 javadoc
 NonEmptyAtclauseDescription
 At-clause should have a non-empty description.
-408

[03/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
index f36f1c4..63d374e 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
@@ -356,182 +356,197 @@
 348  String 
BLOCK_CACHE_GENERAL_BLOOM_META_HIT_COUNT = 
"blockCacheGeneralBloomMetaHitCount";
 349  String 
BLOCK_CACHE_DELETE_FAMILY_BLOOM_HIT_COUNT = 
"blockCacheDeleteFamilyBloomHitCount";
 350  String BLOCK_CACHE_TRAILER_HIT_COUNT = 
"blockCacheTrailerHitCount";
-351
-352  String RS_START_TIME_NAME = 
"regionServerStartTime";
-353  String ZOOKEEPER_QUORUM_NAME = 
"zookeeperQuorum";
-354  String SERVER_NAME_NAME = 
"serverName";
-355  String CLUSTER_ID_NAME = "clusterId";
-356  String RS_START_TIME_DESC = 
"RegionServer Start Time";
-357  String ZOOKEEPER_QUORUM_DESC = 
"ZooKeeper Quorum";
-358  String SERVER_NAME_DESC = "Server 
Name";
-359  String CLUSTER_ID_DESC = "Cluster 
Id";
-360  String UPDATES_BLOCKED_TIME = 
"updatesBlockedTime";
-361  String UPDATES_BLOCKED_DESC =
-362  "Number of MS updates have been 
blocked so that the memstore can be flushed.";
-363  String DELETE_KEY = "delete";
-364  String CHECK_AND_DELETE_KEY = 
"checkAndDelete";
-365  String CHECK_AND_PUT_KEY = 
"checkAndPut";
-366  String DELETE_BATCH_KEY = 
"deleteBatch";
-367  String GET_SIZE_KEY = "getSize";
-368  String GET_KEY = "get";
-369  String INCREMENT_KEY = "increment";
-370  String PUT_KEY = "put";
-371  String PUT_BATCH_KEY = "putBatch";
-372  String APPEND_KEY = "append";
-373  String REPLAY_KEY = "replay";
-374  String SCAN_KEY = "scan";
-375  String SCAN_SIZE_KEY = "scanSize";
-376  String SCAN_TIME_KEY = "scanTime";
-377
-378  String SLOW_PUT_KEY = "slowPutCount";
-379  String SLOW_GET_KEY = "slowGetCount";
-380  String SLOW_DELETE_KEY = 
"slowDeleteCount";
-381  String SLOW_INCREMENT_KEY = 
"slowIncrementCount";
-382  String SLOW_APPEND_KEY = 
"slowAppendCount";
-383  String SLOW_PUT_DESC =
-384  "The number of batches containing 
puts that took over 1000ms to complete";
-385  String SLOW_DELETE_DESC =
-386  "The number of batches containing 
delete(s) that took over 1000ms to complete";
-387  String SLOW_GET_DESC = "The number of 
Gets that took over 1000ms to complete";
-388  String SLOW_INCREMENT_DESC =
-389  "The number of Increments that took 
over 1000ms to complete";
-390  String SLOW_APPEND_DESC =
-391  "The number of Appends that took 
over 1000ms to complete";
+351  String L1_CACHE_HIT_COUNT = 
"l1CacheHitCount";
+352  String L1_CACHE_HIT_COUNT_DESC = "L1 
cache hit count.";
+353  String L1_CACHE_MISS_COUNT = 
"l1CacheMissCount";
+354  String L1_CACHE_MISS_COUNT_DESC = "L1 
cache miss count.";
+355  String L1_CACHE_HIT_RATIO = 
"l1CacheHitRatio";
+356  String L1_CACHE_HIT_RATIO_DESC = "L1 
cache hit ratio.";
+357  String L1_CACHE_MISS_RATIO = 
"l1CacheMissRatio";
+358  String L1_CACHE_MISS_RATIO_DESC = "L1 
cache miss ratio.";
+359  String L2_CACHE_HIT_COUNT = 
"l2CacheHitCount";
+360  String L2_CACHE_HIT_COUNT_DESC = "L2 
cache hit count.";
+361  String L2_CACHE_MISS_COUNT = 
"l2CacheMissCount";
+362  String L2_CACHE_MISS_COUNT_DESC = "L2 
cache miss count.";
+363  String L2_CACHE_HIT_RATIO = 
"l2CacheHitRatio";
+364  String L2_CACHE_HIT_RATIO_DESC = "L2 
cache hit ratio.";
+365  String L2_CACHE_MISS_RATIO = 
"l2CacheMissRatio";
+366  String L2_CACHE_MISS_RATIO_DESC = "L2 
cache miss ratio.";
+367  String RS_START_TIME_NAME = 
"regionServerStartTime";
+368  String ZOOKEEPER_QUORUM_NAME = 
"zookeeperQuorum";
+369  String SERVER_NAME_NAME = 
"serverName";
+370  String CLUSTER_ID_NAME = "clusterId";
+371  String RS_START_TIME_DESC = 
"RegionServer Start Time";
+372  String ZOOKEEPER_QUORUM_DESC = 
"ZooKeeper Quorum";
+373  String SERVER_NAME_DESC = "Server 
Name";
+374  String CLUSTER_ID_DESC = "Cluster 
Id";
+375  String UPDATES_BLOCKED_TIME = 
"updatesBlockedTime";
+376  String UPDATES_BLOCKED_DESC =
+377  "Number of MS updates have been 
blocked so that the memstore can be flushed.";
+378  String DELETE_KEY = "delete";
+379  String CHECK_AND_DELETE_KEY = 
"checkAndDelete";
+380  String CHECK_AND_PUT_KEY = 
"checkAndPut";
+381  String DELETE_BATCH_KEY = 
"deleteBatch";
+382  String GET_SIZE_KEY = "getSize";
+383  String GET_KEY = "get";
+384  String INCREMENT_KEY = "increment";
+385  String PUT_KEY = "put";
+386  String PUT_BATCH_KEY = "putBatch";
+387  String APPEND_KEY = "append";
+388  String REPLAY_KEY = "replay";
+389  String SCAN_KEY = "scan";
+390  String SCAN_SIZE_KEY = "scanSize";
+391  String SCAN_TIME_KEY = "scanTime";

[46/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 10be9fb..446a726 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
 2007 - 2017 The Apache Software Foundation
 
   File: 2038,
- Errors: 12894,
+ Errors: 12891,
  Warnings: 0,
  Infos: 0
   
@@ -15413,7 +15413,7 @@ under the License.
   0
 
 
-  41
+  43
 
   
   
@@ -16057,7 +16057,7 @@ under the License.
   0
 
 
-  1
+  2
 
   
   
@@ -16477,7 +16477,7 @@ under the License.
   0
 
 
-  130
+  131
 
   
   
@@ -19641,7 +19641,7 @@ under the License.
   0
 
 
-  1
+  3
 
   
   
@@ -19893,7 +19893,7 @@ under the License.
   0
 
 
-  8
+  5
 
   
   
@@ -27243,7 +27243,7 @@ under the License.
   0
 
 
-  67
+  61
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/coc.html
--
diff --git a/coc.html b/coc.html
index 3d24561..a9c95c1 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Code of Conduct Policy
@@ -380,7 +380,7 @@ email to mailto:priv...@hbase.apache.org;>the priv
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-09-05
+  Last Published: 
2017-09-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 80581dc..3e37769 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -679,7 +679,7 @@ Now your HBase server is running, start 
coding and build that next
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-09-05
+  Last Published: 
2017-09-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index 0733f8e..bfc88e9 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependencies
 
@@ -445,7 +445,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-09-05
+  Last Published: 
2017-09-06
 
 
 



[23/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.StatisticsThread.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.StatisticsThread.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.StatisticsThread.html
index 55c1e83..989423f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.StatisticsThread.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.StatisticsThread.html
@@ -848,375 +848,377 @@
 840   *
 841   * @return max size in bytes
 842   */
-843  public long getMaxSize() {
-844return this.maxSize;
-845  }
-846
-847  @Override
-848  public long getCurrentSize() {
-849return this.size.get();
-850  }
-851
-852  @Override
-853  public long getCurrentDataSize() {
-854return this.dataBlockSize.get();
-855  }
-856
-857  @Override
-858  public long getFreeSize() {
-859return getMaxSize() - 
getCurrentSize();
-860  }
-861
-862  @Override
-863  public long size() {
-864return getMaxSize();
-865  }
-866
-867  @Override
-868  public long getBlockCount() {
-869return this.elements.get();
-870  }
-871
-872  @Override
-873  public long getDataBlockCount() {
-874return 
this.dataBlockElements.get();
-875  }
-876
-877  EvictionThread getEvictionThread() {
-878return this.evictionThread;
-879  }
-880
-881  /*
-882   * Eviction thread.  Sits in waiting 
state until an eviction is triggered
-883   * when the cache size grows above the 
acceptable level.p
-884   *
-885   * Thread is triggered into action by 
{@link LruBlockCache#runEviction()}
-886   */
-887  static class EvictionThread extends 
HasThread {
-888
-889private 
WeakReferenceLruBlockCache cache;
-890private volatile boolean go = true;
-891// flag set after enter the run 
method, used for test
-892private boolean enteringRun = 
false;
-893
-894public EvictionThread(LruBlockCache 
cache) {
-895  
super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
-896  setDaemon(true);
-897  this.cache = new 
WeakReference(cache);
-898}
-899
-900@Override
-901public void run() {
-902  enteringRun = true;
-903  while (this.go) {
-904synchronized (this) {
-905  try {
-906this.wait(1000 * 10/*Don't 
wait for ever*/);
-907  } catch (InterruptedException 
e) {
-908LOG.warn("Interrupted 
eviction thread ", e);
-909
Thread.currentThread().interrupt();
-910  }
-911}
-912LruBlockCache cache = 
this.cache.get();
-913if (cache == null) break;
-914cache.evict();
-915  }
-916}
-917
-918
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NN_NAKED_NOTIFY",
-919justification="This is what we 
want")
-920public void evict() {
-921  synchronized (this) {
-922this.notifyAll();
-923  }
-924}
-925
-926synchronized void shutdown() {
-927  this.go = false;
-928  this.notifyAll();
-929}
-930
-931/**
-932 * Used for the test.
-933 */
-934boolean isEnteringRun() {
-935  return this.enteringRun;
-936}
-937  }
-938
-939  /*
-940   * Statistics thread.  Periodically 
prints the cache statistics to the log.
-941   */
-942  static class StatisticsThread extends 
Thread {
-943
-944private final LruBlockCache lru;
+843
+844  @Override
+845  public long getMaxSize() {
+846return this.maxSize;
+847  }
+848
+849  @Override
+850  public long getCurrentSize() {
+851return this.size.get();
+852  }
+853
+854  @Override
+855  public long getCurrentDataSize() {
+856return this.dataBlockSize.get();
+857  }
+858
+859  @Override
+860  public long getFreeSize() {
+861return getMaxSize() - 
getCurrentSize();
+862  }
+863
+864  @Override
+865  public long size() {
+866return getMaxSize();
+867  }
+868
+869  @Override
+870  public long getBlockCount() {
+871return this.elements.get();
+872  }
+873
+874  @Override
+875  public long getDataBlockCount() {
+876return 
this.dataBlockElements.get();
+877  }
+878
+879  EvictionThread getEvictionThread() {
+880return this.evictionThread;
+881  }
+882
+883  /*
+884   * Eviction thread.  Sits in waiting 
state until an eviction is triggered
+885   * when the cache size grows above the 
acceptable level.p
+886   *
+887   * Thread is triggered into action by 
{@link LruBlockCache#runEviction()}
+888   */
+889  static class EvictionThread extends 
HasThread {
+890
+891private 
WeakReferenceLruBlockCache cache;
+892private volatile boolean go = true;
+893// flag set after enter the run 
method, used for test
+894private boolean enteringRun = 
false;
+895
+896public EvictionThread(LruBlockCache 
cache) {
+897  
super(Thread.currentThread().getName() + 

[40/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.CombinedCacheStats.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.CombinedCacheStats.html
 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.CombinedCacheStats.html
index 72f5fd2..56a7dd6 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.CombinedCacheStats.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.CombinedCacheStats.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class CombinedBlockCache.CombinedCacheStats
+public static class CombinedBlockCache.CombinedCacheStats
 extends CacheStats
 
 
@@ -369,7 +369,7 @@ extends 
 
 lruCacheStats
-private finalCacheStats lruCacheStats
+private finalCacheStats lruCacheStats
 
 
 
@@ -378,7 +378,7 @@ extends 
 
 bucketCacheStats
-private finalCacheStats bucketCacheStats
+private finalCacheStats bucketCacheStats
 
 
 
@@ -395,7 +395,7 @@ extends 
 
 CombinedCacheStats
-CombinedCacheStats(CacheStatslbcStats,
+CombinedCacheStats(CacheStatslbcStats,
CacheStatsfcStats)
 
 
@@ -413,7 +413,7 @@ extends 
 
 getDataMissCount
-publiclonggetDataMissCount()
+publiclonggetDataMissCount()
 
 Overrides:
 getDataMissCountin
 classCacheStats
@@ -426,7 +426,7 @@ extends 
 
 getLeafIndexMissCount
-publiclonggetLeafIndexMissCount()
+publiclonggetLeafIndexMissCount()
 
 Overrides:
 getLeafIndexMissCountin
 classCacheStats
@@ -439,7 +439,7 @@ extends 
 
 getBloomChunkMissCount
-publiclonggetBloomChunkMissCount()
+publiclonggetBloomChunkMissCount()
 
 Overrides:
 getBloomChunkMissCountin
 classCacheStats
@@ -452,7 +452,7 @@ extends 
 
 getMetaMissCount
-publiclonggetMetaMissCount()
+publiclonggetMetaMissCount()
 
 Overrides:
 getMetaMissCountin
 classCacheStats
@@ -465,7 +465,7 @@ extends 
 
 getRootIndexMissCount
-publiclonggetRootIndexMissCount()
+publiclonggetRootIndexMissCount()
 
 Overrides:
 getRootIndexMissCountin
 classCacheStats
@@ -478,7 +478,7 @@ extends 
 
 getIntermediateIndexMissCount
-publiclonggetIntermediateIndexMissCount()
+publiclonggetIntermediateIndexMissCount()
 
 Overrides:
 getIntermediateIndexMissCountin
 classCacheStats
@@ -491,7 +491,7 @@ extends 
 
 getFileInfoMissCount
-publiclonggetFileInfoMissCount()
+publiclonggetFileInfoMissCount()
 
 Overrides:
 getFileInfoMissCountin
 classCacheStats
@@ -504,7 +504,7 @@ extends 
 
 getGeneralBloomMetaMissCount
-publiclonggetGeneralBloomMetaMissCount()
+publiclonggetGeneralBloomMetaMissCount()
 
 Overrides:
 getGeneralBloomMetaMissCountin
 classCacheStats
@@ -517,7 +517,7 @@ extends 
 
 getDeleteFamilyBloomMissCount
-publiclonggetDeleteFamilyBloomMissCount()
+publiclonggetDeleteFamilyBloomMissCount()
 
 Overrides:
 getDeleteFamilyBloomMissCountin
 classCacheStats
@@ -530,7 +530,7 @@ extends 
 
 getTrailerMissCount
-publiclonggetTrailerMissCount()
+publiclonggetTrailerMissCount()
 
 Overrides:
 getTrailerMissCountin
 classCacheStats
@@ -543,7 +543,7 @@ extends 
 
 getDataHitCount
-publiclonggetDataHitCount()
+publiclonggetDataHitCount()
 
 Overrides:
 getDataHitCountin
 classCacheStats
@@ -556,7 +556,7 @@ extends 
 
 getLeafIndexHitCount
-publiclonggetLeafIndexHitCount()
+publiclonggetLeafIndexHitCount()
 
 Overrides:
 getLeafIndexHitCountin
 classCacheStats
@@ -569,7 +569,7 @@ extends 
 
 getBloomChunkHitCount
-publiclonggetBloomChunkHitCount()
+publiclonggetBloomChunkHitCount()
 
 Overrides:
 getBloomChunkHitCountin
 classCacheStats
@@ -582,7 +582,7 @@ extends 
 
 getMetaHitCount
-publiclonggetMetaHitCount()
+publiclonggetMetaHitCount()
 
 Overrides:
 getMetaHitCountin
 classCacheStats
@@ -595,7 +595,7 @@ extends 
 
 getRootIndexHitCount
-publiclonggetRootIndexHitCount()
+publiclonggetRootIndexHitCount()
 
 Overrides:
 getRootIndexHitCountin
 classCacheStats
@@ -608,7 +608,7 @@ extends 
 
 getIntermediateIndexHitCount
-publiclonggetIntermediateIndexHitCount()
+publiclonggetIntermediateIndexHitCount()
 
 Overrides:
 getIntermediateIndexHitCountin
 classCacheStats
@@ -621,7 +621,7 @@ extends 
 
 getFileInfoHitCount
-publiclonggetFileInfoHitCount()
+publiclonggetFileInfoHitCount()
 
 Overrides:
 getFileInfoHitCountin
 classCacheStats
@@ -634,7 +634,7 @@ extends 
 
 getGeneralBloomMetaHitCount
-publiclonggetGeneralBloomMetaHitCount()
+publiclonggetGeneralBloomMetaHitCount()
 
 Overrides:
 getGeneralBloomMetaHitCountin
 classCacheStats
@@ -647,7 +647,7 @@ extends 
 
 getDeleteFamilyBloomHitCount
-publiclonggetDeleteFamilyBloomHitCount()
+publiclonggetDeleteFamilyBloomHitCount()
 
 Overrides:
 getDeleteFamilyBloomHitCountin
 classCacheStats
@@ -660,7 +660,7 @@ extends 
 
 getTrailerHitCount
-publiclonggetTrailerHitCount()
+publiclonggetTrailerHitCount()
 
 Overrides:
 getTrailerHitCountin
 classCacheStats
@@ -673,7 +673,7 @@ extends 
 
 getRequestCount

[16/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
index 8609df1..601ad09 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
@@ -361,1266 +361,1267 @@
 353return this.cacheEnabled;
 354  }
 355
-356  public long getMaxSize() {
-357return this.cacheCapacity;
-358  }
-359
-360  public String getIoEngine() {
-361return ioEngine.toString();
-362  }
-363
-364  /**
-365   * Get the IOEngine from the IO engine 
name
-366   * @param ioEngineName
-367   * @param capacity
-368   * @param persistencePath
-369   * @return the IOEngine
-370   * @throws IOException
-371   */
-372  private IOEngine 
getIOEngineFromName(String ioEngineName, long capacity, String 
persistencePath)
-373  throws IOException {
-374if (ioEngineName.startsWith("file:") 
|| ioEngineName.startsWith("files:")) {
-375  // In order to make the usage 
simple, we only need the prefix 'files:' in
-376  // document whether one or multiple 
file(s), but also support 'file:' for
-377  // the compatibility
-378  String[] filePaths = 
ioEngineName.substring(ioEngineName.indexOf(":") + 1)
-379  
.split(FileIOEngine.FILE_DELIMITER);
-380  return new FileIOEngine(capacity, 
persistencePath != null, filePaths);
-381} else if 
(ioEngineName.startsWith("offheap")) {
-382  return new 
ByteBufferIOEngine(capacity, true);
-383} else if 
(ioEngineName.startsWith("heap")) {
-384  return new 
ByteBufferIOEngine(capacity, false);
-385} else if 
(ioEngineName.startsWith("mmap:")) {
-386  return new 
FileMmapEngine(ioEngineName.substring(5), capacity);
-387} else {
-388  throw new 
IllegalArgumentException(
-389  "Don't understand io engine 
name for cache - prefix with file:, heap or offheap");
-390}
-391  }
-392
-393  /**
-394   * Cache the block with the specified 
name and buffer.
-395   * @param cacheKey block's cache key
-396   * @param buf block buffer
-397   */
-398  @Override
-399  public void cacheBlock(BlockCacheKey 
cacheKey, Cacheable buf) {
-400cacheBlock(cacheKey, buf, false, 
false);
-401  }
-402
-403  /**
-404   * Cache the block with the specified 
name and buffer.
-405   * @param cacheKey block's cache key
-406   * @param cachedItem block buffer
-407   * @param inMemory if block is 
in-memory
-408   * @param cacheDataInL1
-409   */
-410  @Override
-411  public void cacheBlock(BlockCacheKey 
cacheKey, Cacheable cachedItem, boolean inMemory,
-412  final boolean cacheDataInL1) {
-413cacheBlockWithWait(cacheKey, 
cachedItem, inMemory, wait_when_cache);
-414  }
-415
-416  /**
-417   * Cache the block to ramCache
-418   * @param cacheKey block's cache key
-419   * @param cachedItem block buffer
-420   * @param inMemory if block is 
in-memory
-421   * @param wait if true, blocking wait 
when queue is full
-422   */
-423  public void 
cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean 
inMemory,
-424  boolean wait) {
-425if (LOG.isTraceEnabled()) 
LOG.trace("Caching key=" + cacheKey + ", item=" + cachedItem);
-426if (!cacheEnabled) {
-427  return;
-428}
-429
-430if (backingMap.containsKey(cacheKey)) 
{
-431  return;
-432}
-433
-434/*
-435 * Stuff the entry into the RAM cache 
so it can get drained to the persistent store
-436 */
-437RAMQueueEntry re =
-438new RAMQueueEntry(cacheKey, 
cachedItem, accessCount.incrementAndGet(), inMemory);
-439if (ramCache.putIfAbsent(cacheKey, 
re) != null) {
-440  return;
-441}
-442int queueNum = (cacheKey.hashCode() 
 0x7FFF) % writerQueues.size();
-443BlockingQueueRAMQueueEntry bq 
= writerQueues.get(queueNum);
-444boolean successfulAddition = false;
-445if (wait) {
-446  try {
-447successfulAddition = bq.offer(re, 
DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);
-448  } catch (InterruptedException e) 
{
-449
Thread.currentThread().interrupt();
-450  }
-451} else {
-452  successfulAddition = 
bq.offer(re);
-453}
-454if (!successfulAddition) {
-455  ramCache.remove(cacheKey);
-456  cacheStats.failInsert();
-457} else {
-458  
this.blockNumber.incrementAndGet();
-459  
this.heapSize.addAndGet(cachedItem.heapSize());
-460  blocksByHFile.add(cacheKey);
-461}
-462  }
-463
-464  /**
-465   * Get the buffer of the block with the 
specified key.
-466   * @param key block's cache key
-467   * @param caching true if the caller 
caches blocks 

hbase-site git commit: INFRA-10751 Empty commit

2017-09-06 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site e2f20c831 -> 76716de14


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/76716de1
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/76716de1
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/76716de1

Branch: refs/heads/asf-site
Commit: 76716de1402a39b7cf06e899d1373e22cf0d7b3b
Parents: e2f20c8
Author: jenkins 
Authored: Wed Sep 6 15:14:15 2017 +
Committer: jenkins 
Committed: Wed Sep 6 15:14:15 2017 +

--

--




[05/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
index 13f64df..07b6ae0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.html
@@ -28,1533 +28,1529 @@
 020import java.util.ArrayList;
 021import java.util.Arrays;
 022import java.util.Collection;
-023import java.util.Collections;
-024import java.util.Comparator;
-025import java.util.Deque;
-026import java.util.HashMap;
-027import java.util.HashSet;
-028import java.util.Iterator;
-029import java.util.List;
-030import java.util.Map;
-031import java.util.Map.Entry;
-032import java.util.NavigableMap;
-033import java.util.Random;
-034import java.util.Set;
-035import java.util.TreeMap;
-036import java.util.function.Predicate;
-037import java.util.stream.Collectors;
-038
-039import 
org.apache.commons.lang.NotImplementedException;
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.conf.Configuration;
-043import 
org.apache.hadoop.hbase.ClusterStatus;
-044import 
org.apache.hadoop.hbase.HBaseIOException;
-045import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-046import 
org.apache.hadoop.hbase.HRegionInfo;
-047import 
org.apache.hadoop.hbase.ServerLoad;
-048import 
org.apache.hadoop.hbase.ServerName;
-049import 
org.apache.hadoop.hbase.TableName;
-050import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-051import 
org.apache.hadoop.hbase.master.LoadBalancer;
-052import 
org.apache.hadoop.hbase.master.MasterServices;
-053import 
org.apache.hadoop.hbase.master.RackManager;
-054import 
org.apache.hadoop.hbase.master.RegionPlan;
-055import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
-056import 
org.apache.hadoop.hbase.security.access.AccessControlLists;
-057import 
org.apache.hadoop.util.StringUtils;
-058
-059import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-060import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
-061import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-062import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-063import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-064import 
org.apache.zookeeper.KeeperException;
-065
-066/**
-067 * The base class for load balancers. It 
provides the the functions used to by
-068 * {@link 
org.apache.hadoop.hbase.master.assignment.AssignmentManager} to assign 
regions
-069 * in the edge cases. It doesn't provide 
an implementation of the
-070 * actual balancing algorithm.
-071 *
-072 */
-073public abstract class BaseLoadBalancer 
implements LoadBalancer {
-074  protected static final int 
MIN_SERVER_BALANCE = 2;
-075  private volatile boolean stopped = 
false;
-076
-077  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
-078
-079  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
-080= load - 
load.getNumberOfRegions() == 0;
-081
-082  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-083
-084  private static class DefaultRackManager 
extends RackManager {
-085@Override
-086public String getRack(ServerName 
server) {
-087  return UNKNOWN_RACK;
-088}
-089  }
-090
-091  /**
-092   * The constructor that uses the basic 
MetricsBalancer
-093   */
-094  protected BaseLoadBalancer() {
-095metricsBalancer = new 
MetricsBalancer();
-096  }
-097
-098  /**
-099   * This Constructor accepts an instance 
of MetricsBalancer,
-100   * which will be used instead of 
creating a new one
-101   */
-102  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-103this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-104  }
-105
-106  /**
-107   * An efficient array based 
implementation similar to ClusterState for keeping
-108   * the status of the cluster in terms 
of region assignment and distribution.
-109   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-110   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-111   * class uses mostly indexes and 
arrays.
-112   *
-113   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-114   * topology in terms of server names, 
hostnames and racks.
-115   */
-116  protected static class Cluster {
-117ServerName[] servers;
-118String[] hosts; // ServerName 
uniquely identifies a region server. multiple RS can run on the same host
-119

[51/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
Published site at .


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/e2f20c83
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/e2f20c83
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/e2f20c83

Branch: refs/heads/asf-site
Commit: e2f20c8311a644a1673cc0233d8e288af652d92e
Parents: e0abe62
Author: jenkins 
Authored: Wed Sep 6 15:13:36 2017 +
Committer: jenkins 
Committed: Wed Sep 6 15:13:36 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf|  8160 ++--
 apidocs/constant-values.html|   167 +-
 apidocs/index-all.html  | 2 +
 apidocs/org/apache/hadoop/hbase/HConstants.html |   215 +-
 .../hadoop/hbase/util/PairOfSameType.html   |16 +-
 .../org/apache/hadoop/hbase/HConstants.html |12 +-
 .../hbase/client/replication/TableCFs.html  | 2 +-
 .../hbase/ipc/NettyRpcClientConfigHelper.html   | 2 +-
 .../hbase/mapreduce/HFileOutputFormat2.html | 2 +-
 .../hadoop/hbase/mapreduce/ImportTsv.html   | 2 +-
 .../hadoop/hbase/mapreduce/RowCounter.html  | 2 +-
 .../hadoop/hbase/util/PairOfSameType.html   |   181 +-
 book.html   | 4 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 11016 -
 checkstyle.rss  |14 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html |   118 +-
 dependency-info.html| 4 +-
 dependency-management.html  |34 +-
 devapidocs/constant-values.html |   285 +-
 devapidocs/index-all.html   |   128 +-
 .../org/apache/hadoop/hbase/HConstants.html |   217 +-
 .../hadoop/hbase/backup/package-tree.html   | 4 +-
 .../hbase/class-use/DoNotRetryIOException.html  | 8 +-
 .../hadoop/hbase/class-use/HRegionLocation.html | 2 +-
 .../hadoop/hbase/class-use/RegionLocations.html | 2 +-
 .../hadoop/hbase/class-use/ServerName.html  | 6 +-
 .../hbase/classification/package-tree.html  | 6 +-
 .../hadoop/hbase/client/ClientScanner.html  | 8 +-
 .../client/FastFailInterceptorContext.html  |28 +-
 .../client/PreemptiveFastFailInterceptor.html   |16 +-
 .../hadoop/hbase/client/ZKAsyncRegistry.html| 6 +-
 .../hadoop/hbase/client/class-use/Result.html   | 4 +-
 .../hadoop/hbase/client/class-use/Scan.html |20 -
 .../hadoop/hbase/client/package-tree.html   |24 +-
 ...ogWorkerCoordination.ZkSplitTaskDetails.html |22 +-
 .../hadoop/hbase/filter/package-tree.html   |10 +-
 .../hadoop/hbase/io/class-use/TimeRange.html|20 +
 .../hadoop/hbase/io/hfile/BlockCache.html   |46 +-
 .../hadoop/hbase/io/hfile/CacheConfig.html  |   103 +-
 .../CombinedBlockCache.CombinedCacheStats.html  |82 +-
 .../hbase/io/hfile/CombinedBlockCache.html  |59 +-
 .../io/hfile/InclusiveCombinedBlockCache.html   | 4 +-
 .../io/hfile/LruBlockCache.EvictionThread.html  |18 +-
 .../hfile/LruBlockCache.StatisticsThread.html   | 8 +-
 .../hadoop/hbase/io/hfile/LruBlockCache.html|60 +-
 ...emcachedBlockCache.HFileBlockTranscoder.html |12 +-
 .../hbase/io/hfile/MemcachedBlockCache.html |49 +-
 .../hbase/io/hfile/ResizableBlockCache.html | 2 +-
 .../hfile/bucket/BucketCache.BucketEntry.html   |42 +-
 .../bucket/BucketCache.BucketEntryGroup.html|22 +-
 .../hfile/bucket/BucketCache.RAMQueueEntry.html |20 +-
 .../bucket/BucketCache.StatisticsThread.html| 8 +-
 .../hfile/bucket/BucketCache.WriterThread.html  |14 +-
 .../hbase/io/hfile/bucket/BucketCache.html  |   106 +-
 .../hbase/io/hfile/class-use/BlockCache.html| 6 +-
 .../hbase/io/hfile/class-use/CacheStats.html|16 +
 .../hadoop/hbase/io/hfile/package-tree.html | 6 +-
 .../hbase/ipc/class-use/HBaseRpcController.html | 4 +-
 .../hbase/ipc/class-use/RpcCallContext.html | 4 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 4 +-
 .../BaseLoadBalancer.Cluster.Action.Type.html   |14 +-
 .../BaseLoadBalancer.Cluster.Action.html|10 +-
 ...LoadBalancer.Cluster.AssignRegionAction.html |12 +-
 .../BaseLoadBalancer.Cluster.LocalityType.html  |10 +-
 ...seLoadBalancer.Cluster.MoveRegionAction.html |14 +-
 ...eLoadBalancer.Cluster.SwapRegionsAction.html |16 +-
 .../balancer/BaseLoadBalancer.Cluster.html  |   148 +-
 

[29/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.html
index c72a83c..388e227 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/replication/TableBasedReplicationQueuesImpl.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class TableBasedReplicationQueuesImpl
+public class TableBasedReplicationQueuesImpl
 extends ReplicationTableBase
 implements ReplicationQueues
 This class provides an implementation of the 
ReplicationQueues interface using an HBase table
@@ -410,7 +410,7 @@ implements 
 
 LOG
-private static finalorg.apache.commons.logging.Log LOG
+private static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -419,7 +419,7 @@ implements 
 
 INITIAL_OFFSET_BYTES
-private static finalbyte[] INITIAL_OFFSET_BYTES
+private static finalbyte[] INITIAL_OFFSET_BYTES
 
 
 
@@ -428,7 +428,7 @@ implements 
 
 EMPTY_STRING_BYTES
-private static finalbyte[] EMPTY_STRING_BYTES
+private static finalbyte[] EMPTY_STRING_BYTES
 
 
 
@@ -437,7 +437,7 @@ implements 
 
 serverName
-privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String serverName
+privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String serverName
 
 
 
@@ -446,7 +446,7 @@ implements 
 
 serverNameBytes
-privatebyte[] serverNameBytes
+privatebyte[] serverNameBytes
 
 
 
@@ -455,7 +455,7 @@ implements 
 
 replicationState
-privateReplicationStateZKBase replicationState
+privateReplicationStateZKBase replicationState
 
 
 
@@ -472,7 +472,7 @@ implements 
 
 TableBasedReplicationQueuesImpl
-publicTableBasedReplicationQueuesImpl(ReplicationQueuesArgumentsargs)
+publicTableBasedReplicationQueuesImpl(ReplicationQueuesArgumentsargs)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -486,7 +486,7 @@ implements 
 
 TableBasedReplicationQueuesImpl
-publicTableBasedReplicationQueuesImpl(org.apache.hadoop.conf.Configurationconf,
+publicTableBasedReplicationQueuesImpl(org.apache.hadoop.conf.Configurationconf,
Abortableabort,
ZooKeeperWatcherzkw)
 throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -510,7 +510,7 @@ implements 
 
 init
-publicvoidinit(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringserverName)
+publicvoidinit(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringserverName)
   throws ReplicationException
 Description copied from 
interface:ReplicationQueues
 Initialize the region server replication queue 
interface.
@@ -531,7 +531,7 @@ implements 
 
 getListOfReplicators
-publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetListOfReplicators()
+publichttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">Listhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringgetListOfReplicators()
 Description copied from 
class:ReplicationTableBase
 Get a list of all region servers that have outstanding 
replication queues. These servers could
  be alive, dead or from a previous run of the cluster.
@@ -551,7 +551,7 @@ implements 
 
 removeQueue
-publicvoidremoveQueue(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringqueueId)
+publicvoidremoveQueue(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringqueueId)
 Description copied from 
interface:ReplicationQueues
 Remove a replication queue.
 
@@ -568,7 +568,7 @@ implements 
 
 addLog
-publicvoidaddLog(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringqueueId,

[09/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
index 13f64df..07b6ae0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
@@ -28,1533 +28,1529 @@
 020import java.util.ArrayList;
 021import java.util.Arrays;
 022import java.util.Collection;
-023import java.util.Collections;
-024import java.util.Comparator;
-025import java.util.Deque;
-026import java.util.HashMap;
-027import java.util.HashSet;
-028import java.util.Iterator;
-029import java.util.List;
-030import java.util.Map;
-031import java.util.Map.Entry;
-032import java.util.NavigableMap;
-033import java.util.Random;
-034import java.util.Set;
-035import java.util.TreeMap;
-036import java.util.function.Predicate;
-037import java.util.stream.Collectors;
-038
-039import 
org.apache.commons.lang.NotImplementedException;
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.conf.Configuration;
-043import 
org.apache.hadoop.hbase.ClusterStatus;
-044import 
org.apache.hadoop.hbase.HBaseIOException;
-045import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-046import 
org.apache.hadoop.hbase.HRegionInfo;
-047import 
org.apache.hadoop.hbase.ServerLoad;
-048import 
org.apache.hadoop.hbase.ServerName;
-049import 
org.apache.hadoop.hbase.TableName;
-050import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-051import 
org.apache.hadoop.hbase.master.LoadBalancer;
-052import 
org.apache.hadoop.hbase.master.MasterServices;
-053import 
org.apache.hadoop.hbase.master.RackManager;
-054import 
org.apache.hadoop.hbase.master.RegionPlan;
-055import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
-056import 
org.apache.hadoop.hbase.security.access.AccessControlLists;
-057import 
org.apache.hadoop.util.StringUtils;
-058
-059import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-060import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
-061import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-062import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-063import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-064import 
org.apache.zookeeper.KeeperException;
-065
-066/**
-067 * The base class for load balancers. It 
provides the the functions used to by
-068 * {@link 
org.apache.hadoop.hbase.master.assignment.AssignmentManager} to assign 
regions
-069 * in the edge cases. It doesn't provide 
an implementation of the
-070 * actual balancing algorithm.
-071 *
-072 */
-073public abstract class BaseLoadBalancer 
implements LoadBalancer {
-074  protected static final int 
MIN_SERVER_BALANCE = 2;
-075  private volatile boolean stopped = 
false;
-076
-077  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
-078
-079  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
-080= load - 
load.getNumberOfRegions() == 0;
-081
-082  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-083
-084  private static class DefaultRackManager 
extends RackManager {
-085@Override
-086public String getRack(ServerName 
server) {
-087  return UNKNOWN_RACK;
-088}
-089  }
-090
-091  /**
-092   * The constructor that uses the basic 
MetricsBalancer
-093   */
-094  protected BaseLoadBalancer() {
-095metricsBalancer = new 
MetricsBalancer();
-096  }
-097
-098  /**
-099   * This Constructor accepts an instance 
of MetricsBalancer,
-100   * which will be used instead of 
creating a new one
-101   */
-102  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-103this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-104  }
-105
-106  /**
-107   * An efficient array based 
implementation similar to ClusterState for keeping
-108   * the status of the cluster in terms 
of region assignment and distribution.
-109   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-110   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-111   * class uses mostly indexes and 
arrays.
-112   *
-113   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-114   * topology in terms of server names, 
hostnames and racks.
-115   */
-116  protected static class Cluster {
-117ServerName[] 

[15/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
index 8609df1..601ad09 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
@@ -361,1266 +361,1267 @@
 353return this.cacheEnabled;
 354  }
 355
-356  public long getMaxSize() {
-357return this.cacheCapacity;
-358  }
-359
-360  public String getIoEngine() {
-361return ioEngine.toString();
-362  }
-363
-364  /**
-365   * Get the IOEngine from the IO engine 
name
-366   * @param ioEngineName
-367   * @param capacity
-368   * @param persistencePath
-369   * @return the IOEngine
-370   * @throws IOException
-371   */
-372  private IOEngine 
getIOEngineFromName(String ioEngineName, long capacity, String 
persistencePath)
-373  throws IOException {
-374if (ioEngineName.startsWith("file:") 
|| ioEngineName.startsWith("files:")) {
-375  // In order to make the usage 
simple, we only need the prefix 'files:' in
-376  // document whether one or multiple 
file(s), but also support 'file:' for
-377  // the compatibility
-378  String[] filePaths = 
ioEngineName.substring(ioEngineName.indexOf(":") + 1)
-379  
.split(FileIOEngine.FILE_DELIMITER);
-380  return new FileIOEngine(capacity, 
persistencePath != null, filePaths);
-381} else if 
(ioEngineName.startsWith("offheap")) {
-382  return new 
ByteBufferIOEngine(capacity, true);
-383} else if 
(ioEngineName.startsWith("heap")) {
-384  return new 
ByteBufferIOEngine(capacity, false);
-385} else if 
(ioEngineName.startsWith("mmap:")) {
-386  return new 
FileMmapEngine(ioEngineName.substring(5), capacity);
-387} else {
-388  throw new 
IllegalArgumentException(
-389  "Don't understand io engine 
name for cache - prefix with file:, heap or offheap");
-390}
-391  }
-392
-393  /**
-394   * Cache the block with the specified 
name and buffer.
-395   * @param cacheKey block's cache key
-396   * @param buf block buffer
-397   */
-398  @Override
-399  public void cacheBlock(BlockCacheKey 
cacheKey, Cacheable buf) {
-400cacheBlock(cacheKey, buf, false, 
false);
-401  }
-402
-403  /**
-404   * Cache the block with the specified 
name and buffer.
-405   * @param cacheKey block's cache key
-406   * @param cachedItem block buffer
-407   * @param inMemory if block is 
in-memory
-408   * @param cacheDataInL1
-409   */
-410  @Override
-411  public void cacheBlock(BlockCacheKey 
cacheKey, Cacheable cachedItem, boolean inMemory,
-412  final boolean cacheDataInL1) {
-413cacheBlockWithWait(cacheKey, 
cachedItem, inMemory, wait_when_cache);
-414  }
-415
-416  /**
-417   * Cache the block to ramCache
-418   * @param cacheKey block's cache key
-419   * @param cachedItem block buffer
-420   * @param inMemory if block is 
in-memory
-421   * @param wait if true, blocking wait 
when queue is full
-422   */
-423  public void 
cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean 
inMemory,
-424  boolean wait) {
-425if (LOG.isTraceEnabled()) 
LOG.trace("Caching key=" + cacheKey + ", item=" + cachedItem);
-426if (!cacheEnabled) {
-427  return;
-428}
-429
-430if (backingMap.containsKey(cacheKey)) 
{
-431  return;
-432}
-433
-434/*
-435 * Stuff the entry into the RAM cache 
so it can get drained to the persistent store
-436 */
-437RAMQueueEntry re =
-438new RAMQueueEntry(cacheKey, 
cachedItem, accessCount.incrementAndGet(), inMemory);
-439if (ramCache.putIfAbsent(cacheKey, 
re) != null) {
-440  return;
-441}
-442int queueNum = (cacheKey.hashCode() 
 0x7FFF) % writerQueues.size();
-443BlockingQueueRAMQueueEntry bq 
= writerQueues.get(queueNum);
-444boolean successfulAddition = false;
-445if (wait) {
-446  try {
-447successfulAddition = bq.offer(re, 
DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);
-448  } catch (InterruptedException e) 
{
-449
Thread.currentThread().interrupt();
-450  }
-451} else {
-452  successfulAddition = 
bq.offer(re);
-453}
-454if (!successfulAddition) {
-455  ramCache.remove(cacheKey);
-456  cacheStats.failInsert();
-457} else {
-458  
this.blockNumber.incrementAndGet();
-459  
this.heapSize.addAndGet(cachedItem.heapSize());
-460  blocksByHFile.add(cacheKey);
-461}
-462  }
-463
-464  /**
-465   * Get the buffer of the block with the 
specified key.
-466   * @param key block's cache key
-467   * @param caching true if the caller 
caches blocks on cache misses
-468   * @param repeat Whether this is a 
repeat 

[08/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.SwapRegionsAction.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.SwapRegionsAction.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.SwapRegionsAction.html
index 13f64df..07b6ae0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.SwapRegionsAction.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.SwapRegionsAction.html
@@ -28,1533 +28,1529 @@
 020import java.util.ArrayList;
 021import java.util.Arrays;
 022import java.util.Collection;
-023import java.util.Collections;
-024import java.util.Comparator;
-025import java.util.Deque;
-026import java.util.HashMap;
-027import java.util.HashSet;
-028import java.util.Iterator;
-029import java.util.List;
-030import java.util.Map;
-031import java.util.Map.Entry;
-032import java.util.NavigableMap;
-033import java.util.Random;
-034import java.util.Set;
-035import java.util.TreeMap;
-036import java.util.function.Predicate;
-037import java.util.stream.Collectors;
-038
-039import 
org.apache.commons.lang.NotImplementedException;
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.conf.Configuration;
-043import 
org.apache.hadoop.hbase.ClusterStatus;
-044import 
org.apache.hadoop.hbase.HBaseIOException;
-045import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-046import 
org.apache.hadoop.hbase.HRegionInfo;
-047import 
org.apache.hadoop.hbase.ServerLoad;
-048import 
org.apache.hadoop.hbase.ServerName;
-049import 
org.apache.hadoop.hbase.TableName;
-050import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-051import 
org.apache.hadoop.hbase.master.LoadBalancer;
-052import 
org.apache.hadoop.hbase.master.MasterServices;
-053import 
org.apache.hadoop.hbase.master.RackManager;
-054import 
org.apache.hadoop.hbase.master.RegionPlan;
-055import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
-056import 
org.apache.hadoop.hbase.security.access.AccessControlLists;
-057import 
org.apache.hadoop.util.StringUtils;
-058
-059import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-060import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
-061import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-062import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-063import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-064import 
org.apache.zookeeper.KeeperException;
-065
-066/**
-067 * The base class for load balancers. It 
provides the the functions used to by
-068 * {@link 
org.apache.hadoop.hbase.master.assignment.AssignmentManager} to assign 
regions
-069 * in the edge cases. It doesn't provide 
an implementation of the
-070 * actual balancing algorithm.
-071 *
-072 */
-073public abstract class BaseLoadBalancer 
implements LoadBalancer {
-074  protected static final int 
MIN_SERVER_BALANCE = 2;
-075  private volatile boolean stopped = 
false;
-076
-077  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
-078
-079  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
-080= load - 
load.getNumberOfRegions() == 0;
-081
-082  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-083
-084  private static class DefaultRackManager 
extends RackManager {
-085@Override
-086public String getRack(ServerName 
server) {
-087  return UNKNOWN_RACK;
-088}
-089  }
-090
-091  /**
-092   * The constructor that uses the basic 
MetricsBalancer
-093   */
-094  protected BaseLoadBalancer() {
-095metricsBalancer = new 
MetricsBalancer();
-096  }
-097
-098  /**
-099   * This Constructor accepts an instance 
of MetricsBalancer,
-100   * which will be used instead of 
creating a new one
-101   */
-102  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-103this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-104  }
-105
-106  /**
-107   * An efficient array based 
implementation similar to ClusterState for keeping
-108   * the status of the cluster in terms 
of region assignment and distribution.
-109   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-110   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-111   * class uses mostly indexes and 
arrays.
-112   *
-113   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-114   * topology in terms of server names, 
hostnames and racks.
-115   */
-116  protected static class Cluster {
-117ServerName[] 

[34/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
index 41f6202..051cd71 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html
@@ -604,6 +604,70 @@ extends 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L1_CACHE_HIT_COUNT
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L1_CACHE_HIT_COUNT_DESC
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L1_CACHE_HIT_RATIO
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L1_CACHE_HIT_RATIO_DESC
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L1_CACHE_MISS_COUNT
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L1_CACHE_MISS_COUNT_DESC
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L1_CACHE_MISS_RATIO
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L1_CACHE_MISS_RATIO_DESC
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L2_CACHE_HIT_COUNT
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L2_CACHE_HIT_COUNT_DESC
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L2_CACHE_HIT_RATIO
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L2_CACHE_HIT_RATIO_DESC
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L2_CACHE_MISS_COUNT
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L2_CACHE_MISS_COUNT_DESC
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L2_CACHE_MISS_RATIO
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+L2_CACHE_MISS_RATIO_DESC
+
+
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 LARGE_COMPACTION_QUEUE_LENGTH
 
 
@@ -2775,13 +2839,221 @@ extends 
 
 
+
+
+
+
+
+L1_CACHE_HIT_COUNT
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String L1_CACHE_HIT_COUNT
+
+See Also:
+Constant
 Field Values
+
+
+
+
+
+
+
+
+L1_CACHE_HIT_COUNT_DESC
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String L1_CACHE_HIT_COUNT_DESC
+
+See Also:
+Constant
 Field Values
+
+
+
+
+
+
+
+
+L1_CACHE_MISS_COUNT
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String L1_CACHE_MISS_COUNT
+
+See Also:
+Constant
 Field Values
+
+
+
+
+
+
+
+
+L1_CACHE_MISS_COUNT_DESC
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String L1_CACHE_MISS_COUNT_DESC
+
+See Also:
+Constant
 Field Values
+
+
+
+
+
+
+
+
+L1_CACHE_HIT_RATIO
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String L1_CACHE_HIT_RATIO
+
+See Also:
+Constant
 Field Values
+
+
+
+
+
+
+
+
+L1_CACHE_HIT_RATIO_DESC
+static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String L1_CACHE_HIT_RATIO_DESC
+
+See Also:
+Constant
 Field Values
+
+
+
+
+
+
+
+
+L1_CACHE_MISS_RATIO
+static 

[38/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
index e9342d2..e74ba0c 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.StatisticsThread.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class BucketCache.StatisticsThread
+private static class BucketCache.StatisticsThread
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?is-external=true;
 title="class or interface in java.lang">Thread
 
 
@@ -239,7 +239,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?
 
 
 bucketCache
-private finalBucketCache bucketCache
+private finalBucketCache bucketCache
 
 
 
@@ -256,7 +256,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?
 
 
 StatisticsThread
-publicStatisticsThread(BucketCachebucketCache)
+publicStatisticsThread(BucketCachebucketCache)
 
 
 
@@ -273,7 +273,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Thread.html?
 
 
 run
-publicvoidrun()
+publicvoidrun()
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--;
 title="class or interface in java.lang">runin 
interfacehttp://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
index 0d0855c..48bd4e7 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.WriterThread.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-class BucketCache.WriterThread
+class BucketCache.WriterThread
 extends HasThread
 
 
@@ -231,7 +231,7 @@ extends 
 
 inputQueue
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in java.util.concurrent">BlockingQueueBucketCache.RAMQueueEntry inputQueue
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in java.util.concurrent">BlockingQueueBucketCache.RAMQueueEntry inputQueue
 
 
 
@@ -240,7 +240,7 @@ extends 
 
 writerEnabled
-private volatileboolean writerEnabled
+private volatileboolean writerEnabled
 
 
 
@@ -257,7 +257,7 @@ extends 
 
 WriterThread
-WriterThread(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in java.util.concurrent">BlockingQueueBucketCache.RAMQueueEntryqueue)
+WriterThread(http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/BlockingQueue.html?is-external=true;
 title="class or interface in java.util.concurrent">BlockingQueueBucketCache.RAMQueueEntryqueue)
 
 
 
@@ -274,7 +274,7 @@ extends 
 
 disableWriter
-voiddisableWriter()
+voiddisableWriter()
 
 
 
@@ -283,7 +283,7 @@ extends 
 
 run
-publicvoidrun()
+publicvoidrun()
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true#run--;
 title="class or interface in java.lang">runin 
interfacehttp://docs.oracle.com/javase/8/docs/api/java/lang/Runnable.html?is-external=true;
 title="class or interface in java.lang">Runnable
@@ -298,7 +298,7 @@ extends 
 
 doDrain
-voiddoDrain(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListBucketCache.RAMQueueEntryentries)
+voiddoDrain(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListBucketCache.RAMQueueEntryentries)
   throws http://docs.oracle.com/javase/8/docs/api/java/lang/InterruptedException.html?is-external=true;
 title="class or interface in java.lang">InterruptedException
 Flush the entries in ramCache to IOEngine and add bucket 
entry to backingMap.
  Process all that are passed in even if failure being sure to remove from 
ramCache else we'll

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.html
--

[35/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/regionserver/CellSet.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/CellSet.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/CellSet.html
index d764bf4..7813c10 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/CellSet.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/CellSet.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class CellSet
+public class CellSet
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true;
 title="class or interface in java.util">NavigableSetCell
 A http://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">Set of Cells, where an add will overwrite 
the entry if already
@@ -369,7 +369,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Navigable
 
 
 delegatee
-private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapCell,Cell delegatee
+private finalhttp://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapCell,Cell delegatee
 
 
 
@@ -386,7 +386,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Navigable
 
 
 CellSet
-CellSet(CellComparatorc)
+CellSet(CellComparatorc)
 
 
 
@@ -395,7 +395,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Navigable
 
 
 CellSet
-CellSet(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapCell,Cellm)
+CellSet(http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapCell,Cellm)
 
 
 
@@ -412,7 +412,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Navigable
 
 
 getDelegatee
-http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapCell,CellgetDelegatee()
+http://docs.oracle.com/javase/8/docs/api/java/util/NavigableMap.html?is-external=true;
 title="class or interface in java.util">NavigableMapCell,CellgetDelegatee()
 
 
 
@@ -421,7 +421,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Navigable
 
 
 ceiling
-publicCellceiling(Celle)
+publicCellceiling(Celle)
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true#ceiling-E-;
 title="class or interface in java.util">ceilingin 
interfacehttp://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true;
 title="class or interface in java.util">NavigableSetCell
@@ -434,7 +434,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Navigable
 
 
 descendingIterator
-publichttp://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorCelldescendingIterator()
+publichttp://docs.oracle.com/javase/8/docs/api/java/util/Iterator.html?is-external=true;
 title="class or interface in java.util">IteratorCelldescendingIterator()
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true#descendingIterator--;
 title="class or interface in java.util">descendingIteratorin 
interfacehttp://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true;
 title="class or interface in java.util">NavigableSetCell
@@ -447,7 +447,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Navigable
 
 
 descendingSet
-publichttp://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true;
 title="class or interface in java.util">NavigableSetCelldescendingSet()
+publichttp://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true;
 title="class or interface in java.util">NavigableSetCelldescendingSet()
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true#descendingSet--;
 title="class or interface in java.util">descendingSetin 
interfacehttp://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true;
 title="class or interface in java.util">NavigableSetCell
@@ -460,7 +460,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/util/Navigable
 
 
 floor
-publicCellfloor(Celle)
+publicCellfloor(Celle)
 
 Specified by:
 http://docs.oracle.com/javase/8/docs/api/java/util/NavigableSet.html?is-external=true#floor-E-;
 title="class or interface in java.util">floorin 

[44/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index 6171f8d..14376fa 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Dependency Information
 
@@ -318,7 +318,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-09-05
+  Last Published: 
2017-09-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index cd77612..5a839c8 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependency Management
 
@@ -329,71 +329,71 @@
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
-commons-lang
-http://commons.apache.org/lang/;>commons-lang
-2.6
-jar
-http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
 commons-logging
 http://commons.apache.org/proper/commons-logging/;>commons-logging
 1.2
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 io.dropwizard.metrics
 http://metrics.codahale.com/metrics-core/;>metrics-core
 3.2.1
 jar
 http://www.apache.org/licenses/LICENSE-2.0.html;>Apache License 
2.0
-
+
 javax.servlet
 http://servlet-spec.java.net;>javax.servlet-api
 3.1.0
 jar
 https://glassfish.dev.java.net/nonav/public/CDDL+GPL.html;>CDDL + GPLv2 
with classpath exception
-
+
 javax.ws.rs
 http://jax-rs-spec.java.net;>javax.ws.rs-api
 2.0.1
 jar
 http://glassfish.java.net/public/CDDL+GPL_1_1.html;>CDDL 1.1, http://glassfish.java.net/public/CDDL+GPL_1_1.html;>GPL2 w/ 
CPE
-
+
 javax.xml.bind
 http://jaxb.java.net/;>jaxb-api
 2.2.12
 jar
 https://glassfish.java.net/public/CDDL+GPL_1_1.html;>CDDL 1.1, https://glassfish.java.net/public/CDDL+GPL_1_1.html;>GPL2 w/ 
CPE
-
+
 junit
 http://junit.org;>junit
 4.12
 jar
 http://www.eclipse.org/legal/epl-v10.html;>Eclipse Public License 
1.0
-
+
 log4j
 http://logging.apache.org/log4j/1.2/;>log4j
 1.2.17
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 net.spy
 http://www.couchbase.org/code/couchbase/java;>spymemcached
 2.12.2
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 org.apache.avro
 http://avro.apache.org;>avro
 1.7.7
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>The Apache Software 
License, Version 2.0
-
+
 org.apache.commons
 http://commons.apache.org/proper/commons-crypto/;>commons-crypto
 1.0.0
 jar
 http://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
+
+org.apache.commons
+http://commons.apache.org/proper/commons-lang/;>commons-lang3
+3.6
+jar
+https://www.apache.org/licenses/LICENSE-2.0.txt;>Apache License, Version 
2.0
 
 org.apache.commons
 http://commons.apache.org/math/;>commons-math
@@ -936,7 +936,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-09-05
+  Last Published: 
2017-09-06
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/constant-values.html
--
diff --git a/devapidocs/constant-values.html b/devapidocs/constant-values.html
index 4571e6a..518ad2e 100644
--- a/devapidocs/constant-values.html
+++ b/devapidocs/constant-values.html
@@ -2184,566 +2184,573 @@
 0
 
 
+
+
+publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+NOT_IMPLEMENTED
+"Not implemented"
+
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 OLD_SNAPSHOT_DIR_NAME
 ".snapshot"
 
-
+
 
 
 publicstaticfinallong
 OLDEST_TIMESTAMP
 -9223372036854775808L
 
-
+
 
 
 publicstaticfinalint
 PRIORITY_UNSET
 -1
 
-
+
 
 
 publicstaticfinalint
 QOS_THRESHOLD
 10
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 RECOVERED_EDITS_DIR
 "recovered.edits"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_IMPL
 "hbase.hregion.impl"
 
-
+
 
 
 

[42/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HConstants.html 
b/devapidocs/org/apache/hadoop/hbase/HConstants.html
index d7d8361..52ee868 100644
--- a/devapidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/org/apache/hadoop/hbase/HConstants.html
@@ -1388,9 +1388,13 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
-OLD_SNAPSHOT_DIR_NAME
+NOT_IMPLEMENTED
 
 
+static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+OLD_SNAPSHOT_DIR_NAME
+
+
 static long
 OLDEST_TIMESTAMP
 Deprecated.
@@ -1399,446 +1403,446 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 
-
+
 static int
 PRIORITY_UNSET
 QOS attributes: these attributes are used to demarcate RPC 
call processing
  by different set of handlers.
 
 
-
+
 static int
 QOS_THRESHOLD
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 RECOVERED_EDITS_DIR
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_IMPL
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_HANDLER_ABORT_ON_ERROR_PERCENT
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_HANDLER_COUNT
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_IMPL
 Parameter name for what region server implementation to 
use.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_REPLICATION_HANDLER_COUNT
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SPLIT_THREADS_MAX
 The max number of threads used for splitting storefiles in 
parallel during
  the region split process.
 
 
-
+
 static byte[]
 REGIONINFO_QUALIFIER
 The regioninfo column qualifier
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONINFO_QUALIFIER_STR
 The RegionInfo qualifier as a string
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_INFO_PORT
 A configuration key for regionserver info port
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_INFO_PORT_AUTO
 A flag that enables automatic selection of regionserver 
info port
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_METRICS_PERIOD
 The period (in milliseconds) between computing region 
server point in time metrics
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_PORT
 Parameter name for port region server listens on.
 
 
-
+
 static int
 REPLAY_QOS
 
-
+
 static byte[]
 REPLICATION_BARRIER_FAMILY
 The replication barrier family
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REPLICATION_BARRIER_FAMILY_STR
 The replication barrier family as a string
 
 
-
+
 static boolean
 REPLICATION_BULKLOAD_ENABLE_DEFAULT
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REPLICATION_BULKLOAD_ENABLE_KEY
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REPLICATION_CLUSTER_ID
 Replication cluster id of source cluster which uniquely 
identifies itself with peer cluster
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REPLICATION_CODEC_CONF_KEY
 

[50/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/apache_hbase_reference_guide.pdf
--
diff --git a/apache_hbase_reference_guide.pdf b/apache_hbase_reference_guide.pdf
index 18497b7..ecebd4f 100644
--- a/apache_hbase_reference_guide.pdf
+++ b/apache_hbase_reference_guide.pdf
@@ -5,8 +5,8 @@
 /Author (Apache HBase Team)
 /Creator (Asciidoctor PDF 1.5.0.alpha.15, based on Prawn 2.2.2)
 /Producer (Apache HBase Team)
-/ModDate (D:20170905144634+00'00')
-/CreationDate (D:20170905144634+00'00')
+/ModDate (D:20170906144642+00'00')
+/CreationDate (D:20170906144642+00'00')
 >>
 endobj
 2 0 obj
@@ -57534,7 +57534,7 @@ endobj
 [501 0 R /XYZ 0 345.147 null]
 endobj
 507 0 obj
-<< /Length 27453
+<< /Length 27457
 >>
 stream
 q
@@ -57740,7 +57740,7 @@ ET
 BT
 103.24 768.325 Td
 /F4.0 11 Tf
-<6c616e67> Tj
+<6c616e6733> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -57749,7 +57749,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-125.24 768.325 Td
+130.74 768.325 Td
 /F4.0 11 Tf
 <20> Tj
 ET
@@ -57760,7 +57760,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-130.74 768.325 Td
+136.24 768.325 Td
 /F4.0 11 Tf
 <28> Tj
 ET
@@ -57771,7 +57771,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-136.24 768.325 Td
+141.74 768.325 Td
 /F4.0 11 Tf
 <636f6d6d6f6e73> Tj
 ET
@@ -57782,7 +57782,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-174.74 768.325 Td
+180.24 768.325 Td
 /F4.0 11 Tf
 <2d> Tj
 ET
@@ -57793,9 +57793,9 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-180.24 768.325 Td
+185.74 768.325 Td
 /F4.0 11 Tf
-<6c616e67> Tj
+<6c616e6733> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -57804,7 +57804,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-202.24 768.325 Td
+213.24 768.325 Td
 /F4.0 11 Tf
 <2d> Tj
 ET
@@ -57815,9 +57815,9 @@ ET
 1.0 0.4 0.0 SCN
 
 BT
-207.74 768.325 Td
+218.74 768.325 Td
 /F4.0 11 Tf
-<322e35> Tj
+<332e36> Tj
 ET
 
 0.0 0.0 0.0 SCN
@@ -57826,7 +57826,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-224.24 768.325 Td
+235.24 768.325 Td
 /F4.0 11 Tf
 <2e> Tj
 ET
@@ -57837,7 +57837,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-229.74 768.325 Td
+240.74 768.325 Td
 /F4.0 11 Tf
 <6a6172> Tj
 ET
@@ -57848,7 +57848,7 @@ ET
 0.2 0.2 0.2 SCN
 
 BT
-246.24 768.325 Td
+257.24 768.325 Td
 /F4.0 11 Tf
 <29> Tj
 ET
@@ -756323,4074 +756323,4074 @@ xref
 938035 0 n 
 938682 0 n 
 938729 0 n 
-966237 0 n 
-966635 0 n 
-966681 0 n 
-966901 0 n 
-967118 0 n 
-985126 0 n 
-985511 0 n 
-985557 0 n 
-985603 0 n 
-985649 0 n 
-992557 0 n 
-992929 0 n 
-992975 0 n 
-993021 0 n 
-993436 0 n 
-0001003952 0 n 
-0001004395 0 n 
-0001004441 0 n 
-0001004487 0 n 
-0001004604 0 n 
-0001004725 0 n 
-0001004771 0 n 
-0001004962 0 n 
-0001005150 0 n 
-0001005341 0 n 
-0001005387 0 n 
-0001005433 0 n 
-0001005479 0 n 
-0001005525 0 n 
-0001005647 0 n 
-0001005692 0 n 
-0001017466 0 n 
-0001017885 0 n 
-0001017931 0 n 
-0001017977 0 n 
-0001018104 0 n 
-0001018150 0 n 
-0001018196 0 n 
-0001018321 0 n 
-0001018367 0 n 
-0001018486 0 n 
-0001033625 0 n 
-0001034028 0 n 
-0001034074 0 n 
-0001034718 0 n 
-0001034864 0 n 
-000104 0 n 
-0001047109 0 n 
-0001047155 0 n 
-0001047289 0 n 
-0001047560 0 n 
-0001047688 0 n 
-0001047735 0 n 
-0001047782 0 n 
-0001047829 0 n 
-0001047974 0 n 
-0001048021 0 n 
-0001048068 0 n 
-0001048260 0 n 
-0001048552 0 n 
-0001058667 0 n 
-0001059073 0 n 
-0001059267 0 n 
-0001059313 0 n 
-0001059535 0 n 
-0001059728 0 n 
-0001078095 0 n 
-0001078493 0 n 
-0001078539 0 n 
-0001078758 0 n 
-0001078952 0 n 
-0001090335 0 n 
-0001090707 0 n 
-0001101876 0 n 
-0001102248 0 n 
-0001106990 0 n 
-0001107375 0 n 
-0001107421 0 n 
-0001107614 0 n 
-0001107804 0 n 
-000265 0 n 
-000687 0 n 
-000733 0 n 
-000861 0 n 
-0001112031 0 n 
-0001112153 0 n 
-0001112301 0 n 
-0001112448 0 n 
-0001122515 0 n 
-0001122892 0 n 
-0001122938 0 n 
-0001122984 0 n 
-0001123148 0 n 
-0001123195 0 n 
-0001136643 0 n 
-0001137049 0 n 
-0001137254 0 n 
-0001137460 0 n 
-0001137663 0 n 
-0001166302 0 n 
-0001166695 0 n 
-0001166742 0 n 
-0001166979 0 n 
-0001167213 0 n 
-0001167452 0 n 
-0001180090 0 n 
-0001180504 0 n 
-0001180550 0 n 
-0001180596 0 n 
-0001180642 0 n 
-0001180688 0 n 
-0001180883 0 n 
-0001181079 0 n 
-0001181258 0 n 
-0001181454 0 n 
-0001181501 0 n 
-0001193792 0 n 
-0001194238 0 n 
-0001194283 0 n 
-0001194330 0 n 
-0001194466 0 n 
-0001194602 0 n 
-0001194724 0 n 
-0001194771 0 n 
-0001194907 0 n 
-0001195042 0 n 
-0001195181 0 n 
-0001195322 0 n 
-0001195463 0 n 
-0001208329 0 n 
-0001208772 

[04/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/metrics/MetricRegistryInfo.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/metrics/MetricRegistryInfo.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/metrics/MetricRegistryInfo.html
index a40aa29..4d8a968 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/metrics/MetricRegistryInfo.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/metrics/MetricRegistryInfo.html
@@ -27,7 +27,7 @@
 019package 
org.apache.hadoop.hbase.metrics;
 020
 021
-022import 
org.apache.commons.lang.builder.HashCodeBuilder;
+022import 
org.apache.commons.lang3.builder.HashCodeBuilder;
 023import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
 024
 025/**

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.html
index 2f1598f..16c47ec 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/metrics/impl/HBaseMetrics2HadoopMetricsAdapter.html
@@ -42,7 +42,7 @@
 034
 035import java.util.Map;
 036
-037import 
org.apache.commons.lang.StringUtils;
+037import 
org.apache.commons.lang3.StringUtils;
 038import org.apache.commons.logging.Log;
 039import 
org.apache.commons.logging.LogFactory;
 040import 
org.apache.hadoop.hbase.metrics.Counter;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.NamedLock.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.NamedLock.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.NamedLock.html
index 4c60248..e5a3533 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.NamedLock.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.NamedLock.html
@@ -35,7 +35,7 @@
 027import java.util.Map.Entry;
 028import 
java.util.concurrent.ConcurrentHashMap;
 029
-030import 
org.apache.commons.lang.builder.HashCodeBuilder;
+030import 
org.apache.commons.lang3.builder.HashCodeBuilder;
 031import org.apache.commons.logging.Log;
 032import 
org.apache.commons.logging.LogFactory;
 033import 
org.apache.hadoop.hbase.DoNotRetryIOException;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.SetQuotaOperations.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.SetQuotaOperations.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.SetQuotaOperations.html
index 4c60248..e5a3533 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.SetQuotaOperations.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.SetQuotaOperations.html
@@ -35,7 +35,7 @@
 027import java.util.Map.Entry;
 028import 
java.util.concurrent.ConcurrentHashMap;
 029
-030import 
org.apache.commons.lang.builder.HashCodeBuilder;
+030import 
org.apache.commons.lang3.builder.HashCodeBuilder;
 031import org.apache.commons.logging.Log;
 032import 
org.apache.commons.logging.LogFactory;
 033import 
org.apache.hadoop.hbase.DoNotRetryIOException;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.SizeSnapshotWithTimestamp.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.SizeSnapshotWithTimestamp.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.SizeSnapshotWithTimestamp.html
index 4c60248..e5a3533 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.SizeSnapshotWithTimestamp.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/quotas/MasterQuotaManager.SizeSnapshotWithTimestamp.html
@@ -35,7 +35,7 @@
 027import java.util.Map.Entry;
 028import 
java.util.concurrent.ConcurrentHashMap;
 029
-030import 
org.apache.commons.lang.builder.HashCodeBuilder;
+030import 
org.apache.commons.lang3.builder.HashCodeBuilder;
 031import org.apache.commons.logging.Log;
 032import 
org.apache.commons.logging.LogFactory;
 033import 
org.apache.hadoop.hbase.DoNotRetryIOException;


[37/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
index 87e0ee5..94f9ca4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static class BaseLoadBalancer.Cluster.AssignRegionAction
+public static class BaseLoadBalancer.Cluster.AssignRegionAction
 extends BaseLoadBalancer.Cluster.Action
 
 
@@ -239,7 +239,7 @@ extends 
 
 region
-publicint region
+publicint region
 
 
 
@@ -248,7 +248,7 @@ extends 
 
 server
-publicint server
+publicint server
 
 
 
@@ -265,7 +265,7 @@ extends 
 
 AssignRegionAction
-publicAssignRegionAction(intregion,
+publicAssignRegionAction(intregion,
   intserver)
 
 
@@ -283,7 +283,7 @@ extends 
 
 undoAction
-publicBaseLoadBalancer.Cluster.ActionundoAction()
+publicBaseLoadBalancer.Cluster.ActionundoAction()
 Description copied from 
class:BaseLoadBalancer.Cluster.Action
 Returns an Action which would undo this action
 
@@ -298,7 +298,7 @@ extends 
 
 toString
-publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
+publichttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">StringtoString()
 
 Overrides:
 toStringin
 classBaseLoadBalancer.Cluster.Action

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
index 081d99e..6cd4f6a 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.LocalityType.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-static enum BaseLoadBalancer.Cluster.LocalityType
+static enum BaseLoadBalancer.Cluster.LocalityType
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumBaseLoadBalancer.Cluster.LocalityType
 
 
@@ -210,7 +210,7 @@ the order they are declared.
 
 
 SERVER
-public static finalBaseLoadBalancer.Cluster.LocalityType
 SERVER
+public static finalBaseLoadBalancer.Cluster.LocalityType
 SERVER
 
 
 
@@ -219,7 +219,7 @@ the order they are declared.
 
 
 RACK
-public static finalBaseLoadBalancer.Cluster.LocalityType
 RACK
+public static finalBaseLoadBalancer.Cluster.LocalityType
 RACK
 
 
 
@@ -236,7 +236,7 @@ the order they are declared.
 
 
 values
-public staticBaseLoadBalancer.Cluster.LocalityType[]values()
+public staticBaseLoadBalancer.Cluster.LocalityType[]values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This method may be used to iterate
 over the constants as follows:
@@ -256,7 +256,7 @@ for (BaseLoadBalancer.Cluster.LocalityType c : 
BaseLoadBalancer.Cluster.Locality
 
 
 valueOf
-public staticBaseLoadBalancer.Cluster.LocalityTypevalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
+public staticBaseLoadBalancer.Cluster.LocalityTypevalueOf(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">Stringname)
 Returns the enum constant of this type with the specified 
name.
 The string must match exactly an identifier used to declare an
 enum constant in this type.  (Extraneous whitespace characters are 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
 
b/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
index 4208c25..084e405 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.MoveRegionAction.html
+++ 

[28/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.html
index 0fcc8f7..165e97e 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.html
@@ -37,7 +37,7 @@
 029import java.util.Map.Entry;
 030import java.util.Set;
 031
-032import 
org.apache.commons.lang.StringUtils;
+032import 
org.apache.commons.lang3.StringUtils;
 033import org.apache.commons.logging.Log;
 034import 
org.apache.commons.logging.LogFactory;
 035import 
org.apache.hadoop.hbase.TableName;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html
index 1b7e214..31c0aaf 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html
@@ -46,7 +46,7 @@
 038
 039import 
org.apache.commons.cli.CommandLine;
 040import 
org.apache.commons.cli.HelpFormatter;
-041import 
org.apache.commons.lang.StringUtils;
+041import 
org.apache.commons.lang3.StringUtils;
 042import org.apache.commons.logging.Log;
 043import 
org.apache.commons.logging.LogFactory;
 044import 
org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
index c896882..a6592c1 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
@@ -34,7 +34,7 @@
 026import java.util.Map;
 027import java.util.Set;
 028
-029import 
org.apache.commons.lang.StringUtils;
+029import 
org.apache.commons.lang3.StringUtils;
 030import org.apache.commons.logging.Log;
 031import 
org.apache.commons.logging.LogFactory;
 032import 
org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
index 4928cd0..89da94f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
@@ -50,7 +50,7 @@
 042import 
org.apache.commons.cli.CommandLine;
 043import 
org.apache.commons.cli.HelpFormatter;
 044import org.apache.commons.cli.Options;
-045import 
org.apache.commons.lang.StringUtils;
+045import 
org.apache.commons.lang3.StringUtils;
 046import 
org.apache.hadoop.conf.Configuration;
 047import 
org.apache.hadoop.conf.Configured;
 048import org.apache.hadoop.fs.FileSystem;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
index 4928cd0..89da94f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
@@ -50,7 +50,7 @@
 042import 
org.apache.commons.cli.CommandLine;
 043import 
org.apache.commons.cli.HelpFormatter;
 044import org.apache.commons.cli.Options;
-045import 
org.apache.commons.lang.StringUtils;
+045import 
org.apache.commons.lang3.StringUtils;
 046import 
org.apache.hadoop.conf.Configuration;
 047import 
org.apache.hadoop.conf.Configured;
 048import org.apache.hadoop.fs.FileSystem;

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
--
diff --git 

[11/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
index 13f64df..07b6ae0 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/master/balancer/BaseLoadBalancer.Cluster.AssignRegionAction.html
@@ -28,1533 +28,1529 @@
 020import java.util.ArrayList;
 021import java.util.Arrays;
 022import java.util.Collection;
-023import java.util.Collections;
-024import java.util.Comparator;
-025import java.util.Deque;
-026import java.util.HashMap;
-027import java.util.HashSet;
-028import java.util.Iterator;
-029import java.util.List;
-030import java.util.Map;
-031import java.util.Map.Entry;
-032import java.util.NavigableMap;
-033import java.util.Random;
-034import java.util.Set;
-035import java.util.TreeMap;
-036import java.util.function.Predicate;
-037import java.util.stream.Collectors;
-038
-039import 
org.apache.commons.lang.NotImplementedException;
-040import org.apache.commons.logging.Log;
-041import 
org.apache.commons.logging.LogFactory;
-042import 
org.apache.hadoop.conf.Configuration;
-043import 
org.apache.hadoop.hbase.ClusterStatus;
-044import 
org.apache.hadoop.hbase.HBaseIOException;
-045import 
org.apache.hadoop.hbase.HDFSBlocksDistribution;
-046import 
org.apache.hadoop.hbase.HRegionInfo;
-047import 
org.apache.hadoop.hbase.ServerLoad;
-048import 
org.apache.hadoop.hbase.ServerName;
-049import 
org.apache.hadoop.hbase.TableName;
-050import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-051import 
org.apache.hadoop.hbase.master.LoadBalancer;
-052import 
org.apache.hadoop.hbase.master.MasterServices;
-053import 
org.apache.hadoop.hbase.master.RackManager;
-054import 
org.apache.hadoop.hbase.master.RegionPlan;
-055import 
org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
-056import 
org.apache.hadoop.hbase.security.access.AccessControlLists;
-057import 
org.apache.hadoop.util.StringUtils;
-058
-059import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
-060import 
org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
-061import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
-062import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-063import 
org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
-064import 
org.apache.zookeeper.KeeperException;
-065
-066/**
-067 * The base class for load balancers. It 
provides the the functions used to by
-068 * {@link 
org.apache.hadoop.hbase.master.assignment.AssignmentManager} to assign 
regions
-069 * in the edge cases. It doesn't provide 
an implementation of the
-070 * actual balancing algorithm.
-071 *
-072 */
-073public abstract class BaseLoadBalancer 
implements LoadBalancer {
-074  protected static final int 
MIN_SERVER_BALANCE = 2;
-075  private volatile boolean stopped = 
false;
-076
-077  private static final 
ListHRegionInfo EMPTY_REGION_LIST = new ArrayList(0);
-078
-079  static final 
PredicateServerLoad IDLE_SERVER_PREDICATOR
-080= load - 
load.getNumberOfRegions() == 0;
-081
-082  protected final RegionLocationFinder 
regionFinder = new RegionLocationFinder();
-083
-084  private static class DefaultRackManager 
extends RackManager {
-085@Override
-086public String getRack(ServerName 
server) {
-087  return UNKNOWN_RACK;
-088}
-089  }
-090
-091  /**
-092   * The constructor that uses the basic 
MetricsBalancer
-093   */
-094  protected BaseLoadBalancer() {
-095metricsBalancer = new 
MetricsBalancer();
-096  }
-097
-098  /**
-099   * This Constructor accepts an instance 
of MetricsBalancer,
-100   * which will be used instead of 
creating a new one
-101   */
-102  protected 
BaseLoadBalancer(MetricsBalancer metricsBalancer) {
-103this.metricsBalancer = 
(metricsBalancer != null) ? metricsBalancer : new MetricsBalancer();
-104  }
-105
-106  /**
-107   * An efficient array based 
implementation similar to ClusterState for keeping
-108   * the status of the cluster in terms 
of region assignment and distribution.
-109   * LoadBalancers, such as 
StochasticLoadBalancer uses this Cluster object because of
-110   * hundreds of thousands of hashmap 
manipulations are very costly, which is why this
-111   * class uses mostly indexes and 
arrays.
-112   *
-113   * Cluster tracks a list of unassigned 
regions, region assignments, and the server
-114   * topology in terms of server names, 
hostnames and racks.
-115   */
-116  protected static class Cluster {
-117

[22/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html
index 55c1e83..989423f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.html
@@ -848,375 +848,377 @@
 840   *
 841   * @return max size in bytes
 842   */
-843  public long getMaxSize() {
-844return this.maxSize;
-845  }
-846
-847  @Override
-848  public long getCurrentSize() {
-849return this.size.get();
-850  }
-851
-852  @Override
-853  public long getCurrentDataSize() {
-854return this.dataBlockSize.get();
-855  }
-856
-857  @Override
-858  public long getFreeSize() {
-859return getMaxSize() - 
getCurrentSize();
-860  }
-861
-862  @Override
-863  public long size() {
-864return getMaxSize();
-865  }
-866
-867  @Override
-868  public long getBlockCount() {
-869return this.elements.get();
-870  }
-871
-872  @Override
-873  public long getDataBlockCount() {
-874return 
this.dataBlockElements.get();
-875  }
-876
-877  EvictionThread getEvictionThread() {
-878return this.evictionThread;
-879  }
-880
-881  /*
-882   * Eviction thread.  Sits in waiting 
state until an eviction is triggered
-883   * when the cache size grows above the 
acceptable level.p
-884   *
-885   * Thread is triggered into action by 
{@link LruBlockCache#runEviction()}
-886   */
-887  static class EvictionThread extends 
HasThread {
-888
-889private 
WeakReferenceLruBlockCache cache;
-890private volatile boolean go = true;
-891// flag set after enter the run 
method, used for test
-892private boolean enteringRun = 
false;
-893
-894public EvictionThread(LruBlockCache 
cache) {
-895  
super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
-896  setDaemon(true);
-897  this.cache = new 
WeakReference(cache);
-898}
-899
-900@Override
-901public void run() {
-902  enteringRun = true;
-903  while (this.go) {
-904synchronized (this) {
-905  try {
-906this.wait(1000 * 10/*Don't 
wait for ever*/);
-907  } catch (InterruptedException 
e) {
-908LOG.warn("Interrupted 
eviction thread ", e);
-909
Thread.currentThread().interrupt();
-910  }
-911}
-912LruBlockCache cache = 
this.cache.get();
-913if (cache == null) break;
-914cache.evict();
-915  }
-916}
-917
-918
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NN_NAKED_NOTIFY",
-919justification="This is what we 
want")
-920public void evict() {
-921  synchronized (this) {
-922this.notifyAll();
-923  }
-924}
-925
-926synchronized void shutdown() {
-927  this.go = false;
-928  this.notifyAll();
-929}
-930
-931/**
-932 * Used for the test.
-933 */
-934boolean isEnteringRun() {
-935  return this.enteringRun;
-936}
-937  }
-938
-939  /*
-940   * Statistics thread.  Periodically 
prints the cache statistics to the log.
-941   */
-942  static class StatisticsThread extends 
Thread {
-943
-944private final LruBlockCache lru;
+843
+844  @Override
+845  public long getMaxSize() {
+846return this.maxSize;
+847  }
+848
+849  @Override
+850  public long getCurrentSize() {
+851return this.size.get();
+852  }
+853
+854  @Override
+855  public long getCurrentDataSize() {
+856return this.dataBlockSize.get();
+857  }
+858
+859  @Override
+860  public long getFreeSize() {
+861return getMaxSize() - 
getCurrentSize();
+862  }
+863
+864  @Override
+865  public long size() {
+866return getMaxSize();
+867  }
+868
+869  @Override
+870  public long getBlockCount() {
+871return this.elements.get();
+872  }
+873
+874  @Override
+875  public long getDataBlockCount() {
+876return 
this.dataBlockElements.get();
+877  }
+878
+879  EvictionThread getEvictionThread() {
+880return this.evictionThread;
+881  }
+882
+883  /*
+884   * Eviction thread.  Sits in waiting 
state until an eviction is triggered
+885   * when the cache size grows above the 
acceptable level.p
+886   *
+887   * Thread is triggered into action by 
{@link LruBlockCache#runEviction()}
+888   */
+889  static class EvictionThread extends 
HasThread {
+890
+891private 
WeakReferenceLruBlockCache cache;
+892private volatile boolean go = true;
+893// flag set after enter the run 
method, used for test
+894private boolean enteringRun = 
false;
+895
+896public EvictionThread(LruBlockCache 
cache) {
+897  
super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
+898  setDaemon(true);
+899  this.cache = new 
WeakReference(cache);

[24/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.EvictionThread.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.EvictionThread.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.EvictionThread.html
index 55c1e83..989423f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.EvictionThread.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/LruBlockCache.EvictionThread.html
@@ -848,375 +848,377 @@
 840   *
 841   * @return max size in bytes
 842   */
-843  public long getMaxSize() {
-844return this.maxSize;
-845  }
-846
-847  @Override
-848  public long getCurrentSize() {
-849return this.size.get();
-850  }
-851
-852  @Override
-853  public long getCurrentDataSize() {
-854return this.dataBlockSize.get();
-855  }
-856
-857  @Override
-858  public long getFreeSize() {
-859return getMaxSize() - 
getCurrentSize();
-860  }
-861
-862  @Override
-863  public long size() {
-864return getMaxSize();
-865  }
-866
-867  @Override
-868  public long getBlockCount() {
-869return this.elements.get();
-870  }
-871
-872  @Override
-873  public long getDataBlockCount() {
-874return 
this.dataBlockElements.get();
-875  }
-876
-877  EvictionThread getEvictionThread() {
-878return this.evictionThread;
-879  }
-880
-881  /*
-882   * Eviction thread.  Sits in waiting 
state until an eviction is triggered
-883   * when the cache size grows above the 
acceptable level.p
-884   *
-885   * Thread is triggered into action by 
{@link LruBlockCache#runEviction()}
-886   */
-887  static class EvictionThread extends 
HasThread {
-888
-889private 
WeakReferenceLruBlockCache cache;
-890private volatile boolean go = true;
-891// flag set after enter the run 
method, used for test
-892private boolean enteringRun = 
false;
-893
-894public EvictionThread(LruBlockCache 
cache) {
-895  
super(Thread.currentThread().getName() + ".LruBlockCache.EvictionThread");
-896  setDaemon(true);
-897  this.cache = new 
WeakReference(cache);
-898}
-899
-900@Override
-901public void run() {
-902  enteringRun = true;
-903  while (this.go) {
-904synchronized (this) {
-905  try {
-906this.wait(1000 * 10/*Don't 
wait for ever*/);
-907  } catch (InterruptedException 
e) {
-908LOG.warn("Interrupted 
eviction thread ", e);
-909
Thread.currentThread().interrupt();
-910  }
-911}
-912LruBlockCache cache = 
this.cache.get();
-913if (cache == null) break;
-914cache.evict();
-915  }
-916}
-917
-918
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value="NN_NAKED_NOTIFY",
-919justification="This is what we 
want")
-920public void evict() {
-921  synchronized (this) {
-922this.notifyAll();
-923  }
-924}
-925
-926synchronized void shutdown() {
-927  this.go = false;
-928  this.notifyAll();
-929}
-930
-931/**
-932 * Used for the test.
-933 */
-934boolean isEnteringRun() {
-935  return this.enteringRun;
-936}
-937  }
-938
-939  /*
-940   * Statistics thread.  Periodically 
prints the cache statistics to the log.
-941   */
-942  static class StatisticsThread extends 
Thread {
-943
-944private final LruBlockCache lru;
+843
+844  @Override
+845  public long getMaxSize() {
+846return this.maxSize;
+847  }
+848
+849  @Override
+850  public long getCurrentSize() {
+851return this.size.get();
+852  }
+853
+854  @Override
+855  public long getCurrentDataSize() {
+856return this.dataBlockSize.get();
+857  }
+858
+859  @Override
+860  public long getFreeSize() {
+861return getMaxSize() - 
getCurrentSize();
+862  }
+863
+864  @Override
+865  public long size() {
+866return getMaxSize();
+867  }
+868
+869  @Override
+870  public long getBlockCount() {
+871return this.elements.get();
+872  }
+873
+874  @Override
+875  public long getDataBlockCount() {
+876return 
this.dataBlockElements.get();
+877  }
+878
+879  EvictionThread getEvictionThread() {
+880return this.evictionThread;
+881  }
+882
+883  /*
+884   * Eviction thread.  Sits in waiting 
state until an eviction is triggered
+885   * when the cache size grows above the 
acceptable level.p
+886   *
+887   * Thread is triggered into action by 
{@link LruBlockCache#runEviction()}
+888   */
+889  static class EvictionThread extends 
HasThread {
+890
+891private 
WeakReferenceLruBlockCache cache;
+892private volatile boolean go = true;
+893// flag set after enter the run 
method, used for test
+894private boolean enteringRun = 
false;
+895
+896public EvictionThread(LruBlockCache 
cache) {
+897  
super(Thread.currentThread().getName() + 

[32/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
index d04fa65..9424703 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6};
+var methods = 
{"i0":6,"i1":6,"i2":6,"i3":6,"i4":6,"i5":6,"i6":6,"i7":6,"i8":6,"i9":6,"i10":6,"i11":6,"i12":6,"i13":6,"i14":6,"i15":6,"i16":6,"i17":6,"i18":6,"i19":6,"i20":6,"i21":6,"i22":6,"i23":6,"i24":6,"i25":6,"i26":6,"i27":6,"i28":6,"i29":6,"i30":6,"i31":6,"i32":6,"i33":6,"i34":6,"i35":6,"i36":6,"i37":6,"i38":6,"i39":6,"i40":6,"i41":6,"i42":6,"i43":6,"i44":6,"i45":6,"i46":6,"i47":6,"i48":6,"i49":6,"i50":6,"i51":6,"i52":6,"i53":6,"i54":6,"i55":6,"i56":6,"i57":6,"i58":6,"i59":6,"i60":6,"i61":6,"i62":6,"i63":6,"i64":6,"i65":6,"i66":6,"i67":6,"i68":6,"i69":6,"i70":6,"i71":6,"i72":6,"i73":6,"i74":6,"i75":6,"i76":6,"i77":6,"i78":6,"i79":6,"i80":6,"i81":6,"i82":6,"i83":6,"i84":6,"i85":6,"i86":6,"i87":6,"i88":6,"i89":6,"i90":6,"i91":6,"i92":6,"i93":6,"i94":6,"i95":6,"i96":6,"i97":6,"i98":6,"i99":6,"i100":6,"i101":6,"i102":6,"i103":6,"i104":6,"i105":6,"i106":6,"i107":6};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],4:["t3","Abstract Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -372,290 +372,342 @@ var activeTableTab = "activeTableTab";
 getIntermediateIndexMissCount()
 
 
+long
+getL1CacheHitCount()
+Hit count of L1 cache.
+
+
+
+double
+getL1CacheHitRatio()
+Hit ratio of L1 cache.
+
+
+
+long
+getL1CacheMissCount()
+Miss count of L1 cache.
+
+
+
+double
+getL1CacheMissRatio()
+Miss ratio of L1 cache.
+
+
+
+long
+getL2CacheHitCount()
+Hit count of L2 cache.
+
+
+
+double
+getL2CacheHitRatio()
+Hit ratio of L2 cache.
+
+
+
+long
+getL2CacheMissCount()
+Miss count of L2 cache.
+
+
+
+double
+getL2CacheMissRatio()
+Miss ratio of L2 cache.
+
+
+
 int
 getLargeCompactionQueueSize()
 
-
+
 long
 getLeafIndexHitCount()
 
-
+
 long
 getLeafIndexMissCount()
 
-
+
 long
 getMajorCompactedCellsCount()
 Get the number of cells processed during major 
compactions.
 
 
-
+
 long
 getMajorCompactedCellsSize()
 Get the total amount of data processed during major 
compactions, in bytes.
 
 
-
+
 long
 getMaxStoreFileAge()
 
-
+
+long
+getMemstoreLimit()
+
+
 long
 getMemstoreSize()
 Get the size of the memstore on this region server.
 
 
-
+
 long
 getMetaHitCount()
 
-
+
 long
 getMetaMissCount()
 
-
+
 long
 getMinStoreFileAge()
 
-
+
 long
 getMobFileCacheAccessCount()
 Gets the count of accesses to the mob file cache.
 
 
-
+
 long
 getMobFileCacheCount()
 Gets the count of cached mob files.
 
 
-
+
 long
 getMobFileCacheEvictedCount()
 Gets the number of items evicted from the mob file 
cache.
 
 
-
+
 double
 getMobFileCacheHitPercent()
 Gets the hit percent to the mob file cache.
 
 
-
+
 long
 getMobFileCacheMissCount()
 Gets the count of misses to the mob file cache.
 
 
-
+
 long
 getMobFlushCount()
 Gets the number of the flushes in mob-enabled stores.
 
 
-
+
 long
 getMobFlushedCellsCount()
 Gets the number of mob cells flushed to disk.
 
 
-
+
 long
 getMobFlushedCellsSize()
 Gets the total amount of mob cells flushed to disk, in 
bytes.
 
 
-
+
 long
 getMobScanCellsCount()
 Gets the number of scanned mob cells.
 
 
-
+
 long
 getMobScanCellsSize()
 Gets the total amount of scanned mob cells, in bytes.
 
 
-
+
 long
 getNumMutationsWithoutWAL()
 Number of mutations received with WAL explicitly turned 
off.
 
 
-
+
 long
 getNumOnlineRegions()
 The number of online regions
 
 
-
+
 long
 getNumReferenceFiles()
 
-
+
 long
 getNumStoreFiles()
 Get the number of store files hosted on this region 
server.
 
 
-
+
 long
 getNumStores()
 Get the number of stores hosted on this region server.
 
 
-
+
 long
 getNumWALFiles()
 Get the number of WAL 

[49/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/apidocs/constant-values.html
--
diff --git a/apidocs/constant-values.html b/apidocs/constant-values.html
index d220e26..6d727a9 100644
--- a/apidocs/constant-values.html
+++ b/apidocs/constant-values.html
@@ -1921,566 +1921,573 @@
 0
 
 
+
+
+publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+NOT_IMPLEMENTED
+"Not implemented"
+
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 OLD_SNAPSHOT_DIR_NAME
 ".snapshot"
 
-
+
 
 
 publicstaticfinallong
 OLDEST_TIMESTAMP
 -9223372036854775808L
 
-
+
 
 
 publicstaticfinalint
 PRIORITY_UNSET
 -1
 
-
+
 
 
 publicstaticfinalint
 QOS_THRESHOLD
 10
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 RECOVERED_EDITS_DIR
 "recovered.edits"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_IMPL
 "hbase.hregion.impl"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_HANDLER_ABORT_ON_ERROR_PERCENT
 "hbase.regionserver.handler.abort.on.error.percent"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_HANDLER_COUNT
 "hbase.regionserver.handler.count"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT
 "hbase.regionserver.metahandler.count"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_IMPL
 "hbase.regionserver.impl"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_REPLICATION_HANDLER_COUNT
 "hbase.regionserver.replication.handler.count"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SPLIT_THREADS_MAX
 "hbase.regionserver.region.split.threads.max"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONINFO_QUALIFIER_STR
 "regioninfo"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_INFO_PORT
 "hbase.regionserver.info.port"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_INFO_PORT_AUTO
 "hbase.regionserver.info.port.auto"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_METRICS_PERIOD
 "hbase.regionserver.metrics.period"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGIONSERVER_PORT
 "hbase.regionserver.port"
 
-
+
 
 
 publicstaticfinalint
 REPLAY_QOS
 6
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REPLICATION_BARRIER_FAMILY_STR
 "rep_barrier"
 
-
+
 
 
 publicstaticfinalboolean
 REPLICATION_BULKLOAD_ENABLE_DEFAULT
 false
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REPLICATION_BULKLOAD_ENABLE_KEY
 "hbase.replication.bulkload.enabled"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REPLICATION_CLUSTER_ID
 "hbase.replication.cluster.id"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REPLICATION_CODEC_CONF_KEY
 "hbase.replication.rpc.codec"
 
-
+
 
 
 publicstaticfinalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REPLICATION_CONF_DIR
 "hbase.replication.conf.dir"
 
-
+
 
 
 

[26/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.CombinedCacheStats.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.CombinedCacheStats.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.CombinedCacheStats.html
index 7fd2a7a..4bf66ed 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.CombinedCacheStats.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/CombinedBlockCache.CombinedCacheStats.html
@@ -122,269 +122,274 @@
 114  }
 115
 116  @Override
-117  public long getCurrentDataSize() {
-118return lruCache.getCurrentDataSize() 
+ l2Cache.getCurrentDataSize();
+117  public long getMaxSize() {
+118return lruCache.getMaxSize() + 
l2Cache.getMaxSize();
 119  }
 120
 121  @Override
-122  public long getFreeSize() {
-123return lruCache.getFreeSize() + 
l2Cache.getFreeSize();
+122  public long getCurrentDataSize() {
+123return lruCache.getCurrentDataSize() 
+ l2Cache.getCurrentDataSize();
 124  }
 125
 126  @Override
-127  public long getCurrentSize() {
-128return lruCache.getCurrentSize() + 
l2Cache.getCurrentSize();
+127  public long getFreeSize() {
+128return lruCache.getFreeSize() + 
l2Cache.getFreeSize();
 129  }
 130
 131  @Override
-132  public long getBlockCount() {
-133return lruCache.getBlockCount() + 
l2Cache.getBlockCount();
+132  public long getCurrentSize() {
+133return lruCache.getCurrentSize() + 
l2Cache.getCurrentSize();
 134  }
 135
 136  @Override
-137  public long getDataBlockCount() {
-138return lruCache.getDataBlockCount() + 
l2Cache.getDataBlockCount();
+137  public long getBlockCount() {
+138return lruCache.getBlockCount() + 
l2Cache.getBlockCount();
 139  }
 140
-141  public static class CombinedCacheStats 
extends CacheStats {
-142private final CacheStats 
lruCacheStats;
-143private final CacheStats 
bucketCacheStats;
-144
-145CombinedCacheStats(CacheStats 
lbcStats, CacheStats fcStats) {
-146  super("CombinedBlockCache");
-147  this.lruCacheStats = lbcStats;
-148  this.bucketCacheStats = fcStats;
-149}
-150
-151@Override
-152public long getDataMissCount() {
-153  return 
lruCacheStats.getDataMissCount() + bucketCacheStats.getDataMissCount();
+141  @Override
+142  public long getDataBlockCount() {
+143return lruCache.getDataBlockCount() + 
l2Cache.getDataBlockCount();
+144  }
+145
+146  public static class CombinedCacheStats 
extends CacheStats {
+147private final CacheStats 
lruCacheStats;
+148private final CacheStats 
bucketCacheStats;
+149
+150CombinedCacheStats(CacheStats 
lbcStats, CacheStats fcStats) {
+151  super("CombinedBlockCache");
+152  this.lruCacheStats = lbcStats;
+153  this.bucketCacheStats = fcStats;
 154}
 155
 156@Override
-157public long getLeafIndexMissCount() 
{
-158  return 
lruCacheStats.getLeafIndexMissCount() + 
bucketCacheStats.getLeafIndexMissCount();
+157public long getDataMissCount() {
+158  return 
lruCacheStats.getDataMissCount() + bucketCacheStats.getDataMissCount();
 159}
 160
 161@Override
-162public long getBloomChunkMissCount() 
{
-163  return 
lruCacheStats.getBloomChunkMissCount() + 
bucketCacheStats.getBloomChunkMissCount();
+162public long getLeafIndexMissCount() 
{
+163  return 
lruCacheStats.getLeafIndexMissCount() + 
bucketCacheStats.getLeafIndexMissCount();
 164}
 165
 166@Override
-167public long getMetaMissCount() {
-168  return 
lruCacheStats.getMetaMissCount() + bucketCacheStats.getMetaMissCount();
+167public long getBloomChunkMissCount() 
{
+168  return 
lruCacheStats.getBloomChunkMissCount() + 
bucketCacheStats.getBloomChunkMissCount();
 169}
 170
 171@Override
-172public long getRootIndexMissCount() 
{
-173  return 
lruCacheStats.getRootIndexMissCount() + 
bucketCacheStats.getRootIndexMissCount();
+172public long getMetaMissCount() {
+173  return 
lruCacheStats.getMetaMissCount() + bucketCacheStats.getMetaMissCount();
 174}
 175
 176@Override
-177public long 
getIntermediateIndexMissCount() {
-178  return 
lruCacheStats.getIntermediateIndexMissCount() +
-179  
bucketCacheStats.getIntermediateIndexMissCount();
-180}
-181
-182@Override
-183public long getFileInfoMissCount() 
{
-184  return 
lruCacheStats.getFileInfoMissCount() + 
bucketCacheStats.getFileInfoMissCount();
+177public long getRootIndexMissCount() 
{
+178  return 
lruCacheStats.getRootIndexMissCount() + 
bucketCacheStats.getRootIndexMissCount();
+179}
+180
+181@Override
+182public long 
getIntermediateIndexMissCount() {
+183  return 
lruCacheStats.getIntermediateIndexMissCount() +
+184  

[01/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site e0abe62e9 -> e2f20c831


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
index 81c0f64..ab151cd 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.RegionServerMetricsWrapperRunnable.html
@@ -31,7 +31,7 @@
 023import 
java.util.concurrent.ScheduledExecutorService;
 024import java.util.concurrent.TimeUnit;
 025
-026import 
org.apache.commons.lang.StringUtils;
+026import 
org.apache.commons.lang3.StringUtils;
 027import org.apache.commons.logging.Log;
 028import 
org.apache.commons.logging.LogFactory;
 029import 
org.apache.hadoop.hbase.CompatibilitySingletonFactory;
@@ -115,904 +115,979 @@
 107  private volatile long averageRegionSize 
= 0L;
 108
 109  private CacheStats cacheStats;
-110  private ScheduledExecutorService 
executor;
-111  private Runnable runnable;
-112  private long period;
-113
-114  /**
-115   * Can be null if not on hdfs.
-116   */
-117  private DFSHedgedReadMetrics 
dfsHedgedReadMetrics;
-118
-119  public 
MetricsRegionServerWrapperImpl(final HRegionServer regionServer) {
-120this.regionServer = regionServer;
-121initBlockCache();
-122initMobFileCache();
-123
-124this.period =
-125
regionServer.conf.getLong(HConstants.REGIONSERVER_METRICS_PERIOD,
-126  
HConstants.DEFAULT_REGIONSERVER_METRICS_PERIOD);
-127
-128this.executor = 
CompatibilitySingletonFactory.getInstance(MetricsExecutor.class).getExecutor();
-129this.runnable = new 
RegionServerMetricsWrapperRunnable();
-130
this.executor.scheduleWithFixedDelay(this.runnable, this.period, this.period,
-131  TimeUnit.MILLISECONDS);
-132this.metricsWALSource = 
CompatibilitySingletonFactory.getInstance(MetricsWALSource.class);
-133
-134try {
-135  this.dfsHedgedReadMetrics = 
FSUtils.getDFSHedgedReadMetrics(regionServer.getConfiguration());
-136} catch (IOException e) {
-137  LOG.warn("Failed to get hedged 
metrics", e);
-138}
-139if (LOG.isInfoEnabled()) {
-140  LOG.info("Computing regionserver 
metrics every " + this.period + " milliseconds");
-141}
-142  }
-143
-144  /**
-145   * It's possible that due to threading 
the block cache could not be initialized
-146   * yet (testing multiple region servers 
in one jvm).  So we need to try and initialize
-147   * the blockCache and cacheStats 
reference multiple times until we succeed.
-148   */
-149  private synchronized  void 
initBlockCache() {
-150CacheConfig cacheConfig = 
this.regionServer.cacheConfig;
-151if (cacheConfig != null  
this.blockCache == null) {
-152  this.blockCache = 
cacheConfig.getBlockCache();
-153}
-154
-155if (this.blockCache != null 
 this.cacheStats == null) {
-156  this.cacheStats = 
blockCache.getStats();
-157}
-158  }
-159
-160  /**
-161   * Initializes the mob file cache.
-162   */
-163  private synchronized void 
initMobFileCache() {
-164MobCacheConfig mobCacheConfig = 
this.regionServer.mobCacheConfig;
-165if (mobCacheConfig != null  
this.mobFileCache == null) {
-166  this.mobFileCache = 
mobCacheConfig.getMobFileCache();
-167}
-168  }
-169
-170  @Override
-171  public String getClusterId() {
-172return regionServer.getClusterId();
-173  }
-174
-175  @Override
-176  public long getStartCode() {
-177return regionServer.getStartcode();
-178  }
-179
-180  @Override
-181  public String getZookeeperQuorum() {
-182ZooKeeperWatcher zk = 
regionServer.getZooKeeper();
-183if (zk == null) {
-184  return "";
-185}
-186return zk.getQuorum();
-187  }
-188
-189  @Override
-190  public String getCoprocessors() {
-191String[] coprocessors = 
regionServer.getRegionServerCoprocessors();
-192if (coprocessors == null || 
coprocessors.length == 0) {
-193  return "";
-194}
-195return StringUtils.join(coprocessors, 
", ");
-196  }
-197
-198  @Override
-199  public String getServerName() {
-200ServerName serverName = 
regionServer.getServerName();
-201if (serverName == null) {
-202  return "";
-203}
-204return serverName.getServerName();
-205  }
-206
-207  @Override
-208  public long getNumOnlineRegions() {
-209CollectionRegion 
onlineRegionsLocalContext = regionServer.getOnlineRegionsLocalContext();
-210if (onlineRegionsLocalContext == 
null) {
-211  

[18/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
index 8609df1..601ad09 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.RAMQueueEntry.html
@@ -361,1266 +361,1267 @@
 353return this.cacheEnabled;
 354  }
 355
-356  public long getMaxSize() {
-357return this.cacheCapacity;
-358  }
-359
-360  public String getIoEngine() {
-361return ioEngine.toString();
-362  }
-363
-364  /**
-365   * Get the IOEngine from the IO engine 
name
-366   * @param ioEngineName
-367   * @param capacity
-368   * @param persistencePath
-369   * @return the IOEngine
-370   * @throws IOException
-371   */
-372  private IOEngine 
getIOEngineFromName(String ioEngineName, long capacity, String 
persistencePath)
-373  throws IOException {
-374if (ioEngineName.startsWith("file:") 
|| ioEngineName.startsWith("files:")) {
-375  // In order to make the usage 
simple, we only need the prefix 'files:' in
-376  // document whether one or multiple 
file(s), but also support 'file:' for
-377  // the compatibility
-378  String[] filePaths = 
ioEngineName.substring(ioEngineName.indexOf(":") + 1)
-379  
.split(FileIOEngine.FILE_DELIMITER);
-380  return new FileIOEngine(capacity, 
persistencePath != null, filePaths);
-381} else if 
(ioEngineName.startsWith("offheap")) {
-382  return new 
ByteBufferIOEngine(capacity, true);
-383} else if 
(ioEngineName.startsWith("heap")) {
-384  return new 
ByteBufferIOEngine(capacity, false);
-385} else if 
(ioEngineName.startsWith("mmap:")) {
-386  return new 
FileMmapEngine(ioEngineName.substring(5), capacity);
-387} else {
-388  throw new 
IllegalArgumentException(
-389  "Don't understand io engine 
name for cache - prefix with file:, heap or offheap");
-390}
-391  }
-392
-393  /**
-394   * Cache the block with the specified 
name and buffer.
-395   * @param cacheKey block's cache key
-396   * @param buf block buffer
-397   */
-398  @Override
-399  public void cacheBlock(BlockCacheKey 
cacheKey, Cacheable buf) {
-400cacheBlock(cacheKey, buf, false, 
false);
-401  }
-402
-403  /**
-404   * Cache the block with the specified 
name and buffer.
-405   * @param cacheKey block's cache key
-406   * @param cachedItem block buffer
-407   * @param inMemory if block is 
in-memory
-408   * @param cacheDataInL1
-409   */
-410  @Override
-411  public void cacheBlock(BlockCacheKey 
cacheKey, Cacheable cachedItem, boolean inMemory,
-412  final boolean cacheDataInL1) {
-413cacheBlockWithWait(cacheKey, 
cachedItem, inMemory, wait_when_cache);
-414  }
-415
-416  /**
-417   * Cache the block to ramCache
-418   * @param cacheKey block's cache key
-419   * @param cachedItem block buffer
-420   * @param inMemory if block is 
in-memory
-421   * @param wait if true, blocking wait 
when queue is full
-422   */
-423  public void 
cacheBlockWithWait(BlockCacheKey cacheKey, Cacheable cachedItem, boolean 
inMemory,
-424  boolean wait) {
-425if (LOG.isTraceEnabled()) 
LOG.trace("Caching key=" + cacheKey + ", item=" + cachedItem);
-426if (!cacheEnabled) {
-427  return;
-428}
-429
-430if (backingMap.containsKey(cacheKey)) 
{
-431  return;
-432}
-433
-434/*
-435 * Stuff the entry into the RAM cache 
so it can get drained to the persistent store
-436 */
-437RAMQueueEntry re =
-438new RAMQueueEntry(cacheKey, 
cachedItem, accessCount.incrementAndGet(), inMemory);
-439if (ramCache.putIfAbsent(cacheKey, 
re) != null) {
-440  return;
-441}
-442int queueNum = (cacheKey.hashCode() 
 0x7FFF) % writerQueues.size();
-443BlockingQueueRAMQueueEntry bq 
= writerQueues.get(queueNum);
-444boolean successfulAddition = false;
-445if (wait) {
-446  try {
-447successfulAddition = bq.offer(re, 
DEFAULT_CACHE_WAIT_TIME, TimeUnit.MILLISECONDS);
-448  } catch (InterruptedException e) 
{
-449
Thread.currentThread().interrupt();
-450  }
-451} else {
-452  successfulAddition = 
bq.offer(re);
-453}
-454if (!successfulAddition) {
-455  ramCache.remove(cacheKey);
-456  cacheStats.failInsert();
-457} else {
-458  
this.blockNumber.incrementAndGet();
-459  
this.heapSize.addAndGet(cachedItem.heapSize());
-460  blocksByHFile.add(cacheKey);
-461}
-462  }
-463
-464  /**
-465   * Get the buffer of the block with the 
specified key.
-466   * @param key block's cache key
-467   * @param caching true if the caller 
caches 

[31/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
index 0ae39da..86cf8a2 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapperImpl.html
@@ -18,7 +18,7 @@
 catch(err) {
 }
 //-->
-var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10};
+var methods = 
{"i0":10,"i1":10,"i2":10,"i3":10,"i4":10,"i5":10,"i6":10,"i7":10,"i8":10,"i9":10,"i10":10,"i11":10,"i12":10,"i13":10,"i14":10,"i15":10,"i16":10,"i17":10,"i18":10,"i19":10,"i20":10,"i21":10,"i22":10,"i23":10,"i24":10,"i25":10,"i26":10,"i27":10,"i28":10,"i29":10,"i30":10,"i31":10,"i32":10,"i33":10,"i34":10,"i35":10,"i36":10,"i37":10,"i38":10,"i39":10,"i40":10,"i41":10,"i42":10,"i43":10,"i44":10,"i45":10,"i46":10,"i47":10,"i48":10,"i49":10,"i50":10,"i51":10,"i52":10,"i53":10,"i54":10,"i55":10,"i56":10,"i57":10,"i58":10,"i59":10,"i60":10,"i61":10,"i62":10,"i63":10,"i64":10,"i65":10,"i66":10,"i67":10,"i68":10,"i69":10,"i70":10,"i71":10,"i72":10,"i73":10,"i74":10,"i75":10,"i76":10,"i77":10,"i78":10,"i79":10,"i80":10,"i81":10,"i82":10,"i83":10,"i84":10,"i85":10,"i86":10,"i87":10,"i88":10,"i89":10,"i90":10,"i91":10,"i92":10,"i93":10,"i94":10,"i95":10,"i96":10,"i97":10,"i98":10,"i99":10,"i100":10,"i101":10,"i102":10,"i103":10,"i104":10,"i105":10,"i106":10,"i107":10,"i108":10,"i
 109":10};
 var tabs = {65535:["t0","All Methods"],2:["t2","Instance 
Methods"],8:["t4","Concrete Methods"]};
 var altColor = "altColor";
 var rowColor = "rowColor";
@@ -238,6 +238,14 @@ implements flushedCellsSize
 
 
+private CacheStats
+l1Stats
+
+
+private CacheStats
+l2Stats
+
+
 private static 
org.apache.commons.logging.Log
 LOG
 
@@ -659,303 +667,355 @@ implements getIntermediateIndexMissCount()
 
 
+long
+getL1CacheHitCount()
+Hit count of L1 cache.
+
+
+
+double
+getL1CacheHitRatio()
+Hit ratio of L1 cache.
+
+
+
+long
+getL1CacheMissCount()
+Miss count of L1 cache.
+
+
+
+double
+getL1CacheMissRatio()
+Miss ratio of L1 cache.
+
+
+
+long
+getL2CacheHitCount()
+Hit count of L2 cache.
+
+
+
+double
+getL2CacheHitRatio()
+Hit ratio of L2 cache.
+
+
+
+long
+getL2CacheMissCount()
+Miss count of L2 cache.
+
+
+
+double
+getL2CacheMissRatio()
+Miss ratio of L2 cache.
+
+
+
 int
 getLargeCompactionQueueSize()
 
-
+
 long
 getLeafIndexHitCount()
 
-
+
 long
 getLeafIndexMissCount()
 
-
+
 long
 getMajorCompactedCellsCount()
 Get the number of cells processed during major 
compactions.
 
 
-
+
 long
 getMajorCompactedCellsSize()
 Get the total amount of data processed during major 
compactions, in bytes.
 
 
-
+
 long
 getMaxStoreFileAge()
 
-
+
+long
+getMemstoreLimit()
+
+
 long
 getMemstoreSize()
 Get the size of the memstore on this region server.
 
 
-
+
 long
 getMetaHitCount()
 
-
+
 long
 getMetaMissCount()
 
-
+
 long
 getMinStoreFileAge()
 
-
+
 long
 getMobFileCacheAccessCount()
 Gets the count of accesses to the mob file cache.
 
 
-
+
 long
 getMobFileCacheCount()
 Gets the count of cached mob files.
 
 
-
+
 long
 getMobFileCacheEvictedCount()
 Gets the number of items evicted from the mob file 
cache.
 
 
-
+
 double
 getMobFileCacheHitPercent()
 Gets the hit percent to the mob file cache.
 
 
-
+
 long
 getMobFileCacheMissCount()
 Gets the count of misses to the mob file cache.
 
 
-
+
 long
 getMobFlushCount()
 Gets the number of the flushes in mob-enabled stores.
 
 
-
+
 long
 getMobFlushedCellsCount()
 Gets the number of mob cells flushed to disk.
 
 
-
+
 long
 getMobFlushedCellsSize()
 Gets the total amount of mob cells flushed to disk, in 
bytes.
 
 
-
+
 long
 getMobScanCellsCount()
 Gets the number of scanned mob cells.
 
 
-
+
 long
 getMobScanCellsSize()
 Gets the total amount of scanned mob cells, in bytes.
 
 
-
+
 long
 

[33/51] [partial] hbase-site git commit: Published site at .

2017-09-06 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/e2f20c83/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.html
index a8330b7..bd68e2b 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSourceImpl.html
@@ -332,7 +332,7 @@ implements MetricsRegionServerSource
-APPEND_KEY,
 AVERAGE_REGION_SIZE,
 AVERAGE_REGION_SIZE_DESC,
 AVG_STORE_FILE_AGE,
 AVG_STORE_FILE_AGE_DESC,
 BLOCK_CACHE_BLOOM_CHUNK_HIT_COUNT,
 BLOCK_CAC
 HE_BLOOM_CHUNK_MISS_COUNT, BLOCK_CACHE_COUNT,
 BLOCK_CACHE_COUNT_DESC,
 BLOCK_CACHE_DATA_HIT_COUNT,
 BLOCK_CACHE_DATA_MISS_COUNT,
 BLOCK_CACHE_DELETE_FAMILY_BLOOM_HIT_COUNT,
 BLOCK_CACHE_DELETE_FAMILY_BLOOM_MISS_COUNT,
 BLOCK_CACHE_ENCODED_DATA_HIT_COUNT,
 BLOCK_CACHE_ENCODED_DATA_MISS_COUNT,
 BLOCK_CACHE_EVICTION_COUNT,
 BLOCK_CACHE_EVICTION_COUNT_DESC,
 BLOCK_CACHE_EXPRESS_HIT_PERCENT,
 BLOCK_CACHE_EXPRESS_HIT_PERCENT_DESC,
 BLOCK_CACHE_FAILED_INSERTION_COUNT,
 BLOCK_CACHE_FAILED_INSERTION_COUNT_DESC,
 BLOCK_CACHE_FILE_INFO_HIT_COUNT,
 BLOCK_CACHE_FILE_INFO_MISS_COUNT,
 BLOCK_CACHE_FREE_DESC,
 BLOCK_CACHE_FREE_SIZE,
 BLOCK_CACHE_GENERAL_BLOOM_META_HIT_COUNT,
 BLOCK_CACHE_GENERAL_BLOOM_META_MISS_COUNT,
 BLOCK_CACHE_HIT_COUNT,
 BLOCK_CACHE_HIT_COUNT_DESC,
 BLOCK_CACHE_HIT_PERCENT,
 BLOCK_CACHE_HIT_PERCENT_DESC,
 BLOCK_CACHE_INTERMEDIATE_INDEX_HIT_COUNT, BLOCK_CACHE_INTERMEDIATE_INDEX_MISS_COUNT,
 BLOCK_CACHE_LEAF_INDEX_HIT_COUNT,
 BLOCK_CACHE_LEAF_INDEX_MISS_COUNT,
 BLOCK_CACHE_META_HIT_COUNT,
 BLOCK_CACHE_META_MISS_COUNT,
 BLOCK_CACHE_MISS_COUNT, BLOCK_CACHE_PRIMARY_EVICTION_COUNT,
 BLOCK_CACHE_PRIMARY_EVICTION_COUNT_DESC,
 BLOCK_CACHE_PRIMARY_HIT_COUNT,
 BLOCK_CACHE_PRIMARY_HIT_COUNT_DESC,
 BLOCK_CACHE_PRIMARY_MISS_COUNT,
 B
 LOCK_CACHE_ROOT_INDEX_HIT_COUNT, BLOCK_CACHE_ROOT_INDEX_MISS_COUNT,
 BLOCK_CACHE_SIZE,
 BLOCK_CACHE_SIZE_DESC,
 BLOCK_CACHE_TRAILER_HIT_COUNT,
 BLOCK_CACHE_TRAILER_MISS_COUNT,
 BLOCK_COUNT_MISS_COUNT_DESC,
 BLOCK_COUNT_PRIMARY_MISS_COUNT_DESC,
 BLOCKED_REQUESTS_COUNT,
 BLOCKED_REQUESTS_COUNT_DESC,
 CELLS_COUNT_COMPACTED_FROM_MOB,
 CELLS_COUNT_COMPACTED_FROM_MOB_DESC,
 CELLS_COUNT_COMPACTED_TO_MOB,
 CELLS_COUNT_COMPACTED_TO_MOB_DESC, CELLS_SIZE_COMPACTED_FROM_MOB,
 CELLS_SIZE_COMPACTED_FROM_MOB_DESC,
 CELLS_SIZE_COMPACTED_TO_MOB,
 CELLS_SIZE_COMPACTED_TO_MOB_DESC,
 CHECK_AND_DELETE_KEY,
 CHECK_AND_PUT_KEY,
 CHECK_MUTATE_FAILED_COUNT,
 CHECK_MUTATE_FAILED_COUNT_DESC,
 CHECK_MUTATE_PASSED_COUNT,
 CHECK_MUTATE_PASSED_COUNT_DESC,
 CLUSTER_ID_DESC,
 CLUSTER_ID_NAME,
 COMPACTED_CE
 LLS, COMPACTED_CELLS_DESC,
 COMPACTED_CELLS_SIZE,
 COMPACTED_CELLS_SIZE_DESC,
 COMPACTED_INPUT_BYTES,
 COMPACTED_INPUT_BYTES_DESC,
 COMPACTED_OUTPUT_BYTES,
 COMPACTED_OUTPUT_BYTES_DESC, href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html#COMPACTION_INPUT_FILE_COUNT">COMPACTION_INPUT_FILE_COUNT,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html#COMPACTION_INPUT_FILE_COUNT_DESC">COMPACTION_INPUT_FILE_COUNT_DESC,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html#COMPACTION_INPUT_SIZE">COMPACTION_INPUT_SIZE,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html#COMPACTION_INPUT_SIZE_DESC">COMPACTION_INPUT_SIZE_DESC,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html#COMPACTION_OUTPUT_FILE_COUNT">COMPACTION_OUTPUT_FILE_COUNT,
 > href="../../../../../org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.html#COMPACTION_OUTPUT_FILE_COUNT_DESC">COMPACTION_OUTPUT_FILE_COUNT_DESC,
 > COMPACTION_OUTPUT_SIZE,
 COMPACTION_OUTPUT_SIZE_DESC,
 COMPACTION_QUEUE_LENGTH,
 COMPACTION_QUEUE_LENGTH_DESC,
 COMPACTION_TIME,
 COMPACTION_TIME_DESC,
 DATA_SIZE_WITHOUT_WAL,
 DATA_SIZE_WITHOUT_WAL_DESC,
 DELETE_BATCH_KEY,
 DELETE_KEY,
 FILTERED_READ_REQUEST_COUNT,
 FILTERED_READ_REQUEST_COUNT_DESC,
 FLUSH_MEMSTORE_SIZE,
 FLUSH_MEMSTORE_SIZE_DESC,
 FLUSH_OUTPUT_SIZE,
 FLUSH_OUTPUT_SIZE_DESC,
 FLUSH_QUEUE_LENGTH,
 FLUSH_QUEUE_LENGTH_DESC,
 FLUSH_TIME,
 FLUSH_TIME_DESC,
 FLUSHED_CELLS,
 FLUSHED_CELLS_DESC, FLUSHED_CELLS_SIZE,
 FLUSHED_CELLS_SIZE_DESC,
 FLUSHED_MEMSTORE_BYTES,
 FLUSHED_MEMSTORE_BYTES_DESC,
 FLUSHED_OUTPUT_BYTES,
 FLUSHED_OUTPUT_BYTES_DESC,
 GET_KEY, GET_SIZE_KEY,
 HEDGED_READ_WINS,
 HEDGED_READ_WINS_DESC,
 HEDGED_READS,
 HEDGED_READS_DESC,
 INCREMENT_KEY,
 LARGE_COMPACTION_QUEUE_LENGTH
 , LARGE_COMPACTION_QUEUE_LENGTH_DESC,
 MAJOR_COMPACTED_CELLS,
 MAJOR_COMPACTED_CELLS_DESC,
 MAJOR_COMPACTED_CELLS_SIZE,
 MAJOR_COMPACTED_CELLS_SIZE_DESC,
 MAJOR_COMPACTED_INPUT_BYTES,
 MAJOR_COMPACTED_INPUT_BYTES_DESC,
 

hbase git commit: HBASE-17713 the interface '/version/cluster' with header 'Accept: application/json' return is not JSON but plain text

2017-09-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 064f882cb -> 975c31b3b


HBASE-17713 the interface '/version/cluster' with header 'Accept: 
application/json' return is not JSON but plain text

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/975c31b3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/975c31b3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/975c31b3

Branch: refs/heads/branch-1.4
Commit: 975c31b3bab9298e3de21e58c391b228eb7fe156
Parents: 064f882
Author: Guangxu Cheng 
Authored: Wed Sep 6 12:55:29 2017 +0800
Committer: tedyu 
Committed: Wed Sep 6 07:36:23 2017 -0700

--
 .../rest/model/StorageClusterVersionModel.java   |  4 ++--
 .../hadoop/hbase/rest/TestVersionResource.java   | 19 +--
 .../hadoop/hbase/rest/client/TestXmlParsing.java |  2 +-
 .../model/TestStorageClusterVersionModel.java|  4 ++--
 4 files changed, 22 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/975c31b3/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
--
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
index 54fc8de..bf163f2 100644
--- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
+++ 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
@@ -23,6 +23,7 @@ import org.codehaus.jackson.annotate.JsonValue;
 
 import java.io.Serializable;
 
+import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlValue;
 
@@ -47,7 +48,7 @@ public class StorageClusterVersionModel implements 
Serializable {
   /**
* @return the storage cluster version
*/
-  @XmlValue
+  @XmlAttribute(name="Version")
   public String getVersion() {
 return version;
   }
@@ -62,7 +63,6 @@ public class StorageClusterVersionModel implements 
Serializable {
   /* (non-Javadoc)
* @see java.lang.Object#toString()
*/
-  @JsonValue
   @Override
   public String toString() {
 return version;

http://git-wip-us.apache.org/repos/asf/hbase/blob/975c31b3/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
--
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
index cbacc40..60a4340 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.rest;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 
+import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 
@@ -38,6 +39,8 @@ import org.apache.hadoop.hbase.util.Bytes;
 
 import static org.junit.Assert.*;
 
+import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
+import org.codehaus.jackson.map.ObjectMapper;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -128,6 +131,12 @@ public class TestVersionResource {
 Response response = client.get("/version", Constants.MIMETYPE_JSON);
 assertTrue(response.getCode() == 200);
 assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
+ObjectMapper mapper = new JacksonJaxbJsonProvider()
+.locateMapper(VersionModel.class, MediaType.APPLICATION_JSON_TYPE);
+VersionModel model
+= mapper.readValue(response.getBody(), VersionModel.class);
+validate(model);
+LOG.info("success retrieving Stargate version as JSON");
   }
 
   @Test
@@ -169,11 +178,17 @@ public class TestVersionResource {
   }
 
   @Test
-  public void doTestGetStorageClusterVersionJSON() throws IOException {
+  public void testGetStorageClusterVersionJSON() throws IOException {
 Response response = client.get("/version/cluster", 
Constants.MIMETYPE_JSON);
 assertTrue(response.getCode() == 200);
 assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
+ObjectMapper mapper = new JacksonJaxbJsonProvider()
+.locateMapper(StorageClusterVersionModel.class, 
MediaType.APPLICATION_JSON_TYPE);
+StorageClusterVersionModel clusterVersionModel
+= mapper.readValue(response.getBody(), 

hbase git commit: HBASE-17713 the interface '/version/cluster' with header 'Accept: application/json' return is not JSON but plain text

2017-09-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-1 4733aa465 -> 13da97d85


HBASE-17713 the interface '/version/cluster' with header 'Accept: 
application/json' return is not JSON but plain text

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/13da97d8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/13da97d8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/13da97d8

Branch: refs/heads/branch-1
Commit: 13da97d85efcf81200d8f081a87c46b43339110c
Parents: 4733aa4
Author: Guangxu Cheng 
Authored: Wed Sep 6 12:55:29 2017 +0800
Committer: tedyu 
Committed: Wed Sep 6 07:36:01 2017 -0700

--
 .../rest/model/StorageClusterVersionModel.java   |  4 ++--
 .../hadoop/hbase/rest/TestVersionResource.java   | 19 +--
 .../hadoop/hbase/rest/client/TestXmlParsing.java |  2 +-
 .../model/TestStorageClusterVersionModel.java|  4 ++--
 4 files changed, 22 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/13da97d8/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
--
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
index 54fc8de..bf163f2 100644
--- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
+++ 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
@@ -23,6 +23,7 @@ import org.codehaus.jackson.annotate.JsonValue;
 
 import java.io.Serializable;
 
+import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlValue;
 
@@ -47,7 +48,7 @@ public class StorageClusterVersionModel implements 
Serializable {
   /**
* @return the storage cluster version
*/
-  @XmlValue
+  @XmlAttribute(name="Version")
   public String getVersion() {
 return version;
   }
@@ -62,7 +63,6 @@ public class StorageClusterVersionModel implements 
Serializable {
   /* (non-Javadoc)
* @see java.lang.Object#toString()
*/
-  @JsonValue
   @Override
   public String toString() {
 return version;

http://git-wip-us.apache.org/repos/asf/hbase/blob/13da97d8/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
--
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
index cbacc40..60a4340 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.rest;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 
+import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 
@@ -38,6 +39,8 @@ import org.apache.hadoop.hbase.util.Bytes;
 
 import static org.junit.Assert.*;
 
+import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
+import org.codehaus.jackson.map.ObjectMapper;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -128,6 +131,12 @@ public class TestVersionResource {
 Response response = client.get("/version", Constants.MIMETYPE_JSON);
 assertTrue(response.getCode() == 200);
 assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
+ObjectMapper mapper = new JacksonJaxbJsonProvider()
+.locateMapper(VersionModel.class, MediaType.APPLICATION_JSON_TYPE);
+VersionModel model
+= mapper.readValue(response.getBody(), VersionModel.class);
+validate(model);
+LOG.info("success retrieving Stargate version as JSON");
   }
 
   @Test
@@ -169,11 +178,17 @@ public class TestVersionResource {
   }
 
   @Test
-  public void doTestGetStorageClusterVersionJSON() throws IOException {
+  public void testGetStorageClusterVersionJSON() throws IOException {
 Response response = client.get("/version/cluster", 
Constants.MIMETYPE_JSON);
 assertTrue(response.getCode() == 200);
 assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
+ObjectMapper mapper = new JacksonJaxbJsonProvider()
+.locateMapper(StorageClusterVersionModel.class, 
MediaType.APPLICATION_JSON_TYPE);
+StorageClusterVersionModel clusterVersionModel
+= mapper.readValue(response.getBody(), 

hbase git commit: HBASE-17713 the interface '/version/cluster' with header 'Accept: application/json' return is not JSON but plain text

2017-09-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/branch-2 f0c1cd50c -> d7a74a75a


HBASE-17713 the interface '/version/cluster' with header 'Accept: 
application/json' return is not JSON but plain text

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d7a74a75
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d7a74a75
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d7a74a75

Branch: refs/heads/branch-2
Commit: d7a74a75a133810ec5159ede1e451b2e0f281310
Parents: f0c1cd5
Author: Guangxu Cheng 
Authored: Wed Sep 6 11:58:24 2017 +0800
Committer: tedyu 
Committed: Wed Sep 6 07:31:56 2017 -0700

--
 .../rest/model/StorageClusterVersionModel.java   |  4 ++--
 .../hadoop/hbase/rest/TestVersionResource.java   | 19 +--
 .../hadoop/hbase/rest/client/TestXmlParsing.java |  2 +-
 .../model/TestStorageClusterVersionModel.java|  4 ++--
 4 files changed, 22 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d7a74a75/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
--
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
index e332d49..548ca20 100644
--- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
+++ 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
@@ -23,6 +23,7 @@ import org.codehaus.jackson.annotate.JsonValue;
 
 import java.io.Serializable;
 
+import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlValue;
 
@@ -47,7 +48,7 @@ public class StorageClusterVersionModel implements 
Serializable {
   /**
* @return the storage cluster version
*/
-  @XmlValue
+  @XmlAttribute(name="Version")
   public String getVersion() {
 return version;
   }
@@ -62,7 +63,6 @@ public class StorageClusterVersionModel implements 
Serializable {
   /* (non-Javadoc)
* @see java.lang.Object#toString()
*/
-  @JsonValue
   @Override
   public String toString() {
 return version;

http://git-wip-us.apache.org/repos/asf/hbase/blob/d7a74a75/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
--
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
index b96a5d5..99fce2c 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.rest;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 
+import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 
@@ -35,6 +36,8 @@ import org.apache.hadoop.hbase.rest.model.VersionModel;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
+import org.codehaus.jackson.map.ObjectMapper;
 import org.glassfish.jersey.servlet.ServletContainer;
 
 import static org.junit.Assert.*;
@@ -128,6 +131,12 @@ public class TestVersionResource {
 Response response = client.get("/version", Constants.MIMETYPE_JSON);
 assertTrue(response.getCode() == 200);
 assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
+ObjectMapper mapper = new JacksonJaxbJsonProvider()
+.locateMapper(VersionModel.class, MediaType.APPLICATION_JSON_TYPE);
+VersionModel model
+= mapper.readValue(response.getBody(), VersionModel.class);
+validate(model);
+LOG.info("success retrieving Stargate version as JSON");
   }
 
   @Test
@@ -169,11 +178,17 @@ public class TestVersionResource {
   }
 
   @Test
-  public void doTestGetStorageClusterVersionJSON() throws IOException {
+  public void testGetStorageClusterVersionJSON() throws IOException {
 Response response = client.get("/version/cluster", 
Constants.MIMETYPE_JSON);
 assertTrue(response.getCode() == 200);
 assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
+ObjectMapper mapper = new JacksonJaxbJsonProvider()
+.locateMapper(StorageClusterVersionModel.class, 

hbase git commit: HBASE-17713 the interface '/version/cluster' with header 'Accept: application/json' return is not JSON but plain text

2017-09-06 Thread tedyu
Repository: hbase
Updated Branches:
  refs/heads/master 2f14a054f -> 7fb52e73f


HBASE-17713 the interface '/version/cluster' with header 'Accept: 
application/json' return is not JSON but plain text

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7fb52e73
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7fb52e73
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7fb52e73

Branch: refs/heads/master
Commit: 7fb52e73f340f41792e3a5aed5665402c4bb383c
Parents: 2f14a05
Author: Guangxu Cheng 
Authored: Wed Sep 6 11:58:24 2017 +0800
Committer: tedyu 
Committed: Wed Sep 6 07:31:01 2017 -0700

--
 .../rest/model/StorageClusterVersionModel.java   |  4 ++--
 .../hadoop/hbase/rest/TestVersionResource.java   | 19 +--
 .../hadoop/hbase/rest/client/TestXmlParsing.java |  2 +-
 .../model/TestStorageClusterVersionModel.java|  4 ++--
 4 files changed, 22 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7fb52e73/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
--
diff --git 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
index e332d49..548ca20 100644
--- 
a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
+++ 
b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterVersionModel.java
@@ -23,6 +23,7 @@ import org.codehaus.jackson.annotate.JsonValue;
 
 import java.io.Serializable;
 
+import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlValue;
 
@@ -47,7 +48,7 @@ public class StorageClusterVersionModel implements 
Serializable {
   /**
* @return the storage cluster version
*/
-  @XmlValue
+  @XmlAttribute(name="Version")
   public String getVersion() {
 return version;
   }
@@ -62,7 +63,6 @@ public class StorageClusterVersionModel implements 
Serializable {
   /* (non-Javadoc)
* @see java.lang.Object#toString()
*/
-  @JsonValue
   @Override
   public String toString() {
 return version;

http://git-wip-us.apache.org/repos/asf/hbase/blob/7fb52e73/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
--
diff --git 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
index b96a5d5..99fce2c 100644
--- 
a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
+++ 
b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestVersionResource.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.rest;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 
+import javax.ws.rs.core.MediaType;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 
@@ -35,6 +36,8 @@ import org.apache.hadoop.hbase.rest.model.VersionModel;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
+import org.codehaus.jackson.map.ObjectMapper;
 import org.glassfish.jersey.servlet.ServletContainer;
 
 import static org.junit.Assert.*;
@@ -128,6 +131,12 @@ public class TestVersionResource {
 Response response = client.get("/version", Constants.MIMETYPE_JSON);
 assertTrue(response.getCode() == 200);
 assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
+ObjectMapper mapper = new JacksonJaxbJsonProvider()
+.locateMapper(VersionModel.class, MediaType.APPLICATION_JSON_TYPE);
+VersionModel model
+= mapper.readValue(response.getBody(), VersionModel.class);
+validate(model);
+LOG.info("success retrieving Stargate version as JSON");
   }
 
   @Test
@@ -169,11 +178,17 @@ public class TestVersionResource {
   }
 
   @Test
-  public void doTestGetStorageClusterVersionJSON() throws IOException {
+  public void testGetStorageClusterVersionJSON() throws IOException {
 Response response = client.get("/version/cluster", 
Constants.MIMETYPE_JSON);
 assertTrue(response.getCode() == 200);
 assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
+ObjectMapper mapper = new JacksonJaxbJsonProvider()
+.locateMapper(StorageClusterVersionModel.class, 

hbase git commit: HBASE-18749 Apply the CF specific TimeRange from Scan to filter the segment scanner

2017-09-06 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 cd1b964bb -> f0c1cd50c


HBASE-18749 Apply the CF specific TimeRange from Scan to filter the segment 
scanner


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f0c1cd50
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f0c1cd50
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f0c1cd50

Branch: refs/heads/branch-2
Commit: f0c1cd50c27d9064f27508cf3fecc10ca92f9dd2
Parents: cd1b964
Author: Chia-Ping Tsai 
Authored: Wed Sep 6 20:01:10 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Sep 6 20:01:10 2017 +0800

--
 .../hadoop/hbase/regionserver/CompositeImmutableSegment.java | 8 
 .../apache/hadoop/hbase/regionserver/ImmutableSegment.java   | 5 ++---
 .../org/apache/hadoop/hbase/regionserver/MutableSegment.java | 6 +++---
 .../java/org/apache/hadoop/hbase/regionserver/Segment.java   | 4 ++--
 .../org/apache/hadoop/hbase/regionserver/SegmentScanner.java | 3 ++-
 5 files changed, 13 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f0c1cd50/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
index 16d0a42..30d0d8d 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
@@ -18,12 +18,12 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.logging.Log;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.TimeRange;
+import 
org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
 import java.util.ArrayList;
 import java.util.Iterator;
@@ -122,7 +122,7 @@ public class CompositeImmutableSegment extends 
ImmutableSegment {
   }
 
   @Override
-  public boolean shouldSeek(Scan scan, long oldestUnexpiredTS){
+  public boolean shouldSeek(TimeRange tr, long oldestUnexpiredTS){
 throw new IllegalStateException("Not supported by 
CompositeImmutableScanner");
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/f0c1cd50/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java
index 19b66b4..39ad4c6 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java
@@ -24,7 +24,6 @@ import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ClassSize;
-import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.TimeRange;
 
 import java.io.IOException;
@@ -130,8 +129,8 @@ public class ImmutableSegment extends Segment {
   /  PUBLIC METHODS  /
 
   @Override
-  public boolean shouldSeek(Scan scan, long oldestUnexpiredTS) {
-return this.timeRange.includesTimeRange(scan.getTimeRange()) &&
+  public boolean shouldSeek(TimeRange tr, long oldestUnexpiredTS) {
+return this.timeRange.includesTimeRange(tr) &&
 this.timeRange.getMax() >= oldestUnexpiredTS;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/f0c1cd50/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java
index 0b8f983..8c7b3c3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java
@@ -27,7 +27,7 @@ import 

hbase git commit: HBASE-18749 Apply the CF specific TimeRange from Scan to filter the segment scanner

2017-09-06 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master f36fb11ea -> 2f14a054f


HBASE-18749 Apply the CF specific TimeRange from Scan to filter the segment 
scanner


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2f14a054
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2f14a054
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2f14a054

Branch: refs/heads/master
Commit: 2f14a054f8d6497dee4a65f0206f5bf522477ef9
Parents: f36fb11
Author: Chia-Ping Tsai 
Authored: Tue Sep 5 22:37:38 2017 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Sep 6 19:40:51 2017 +0800

--
 .../hadoop/hbase/regionserver/CompositeImmutableSegment.java | 8 
 .../apache/hadoop/hbase/regionserver/ImmutableSegment.java   | 5 ++---
 .../org/apache/hadoop/hbase/regionserver/MutableSegment.java | 6 +++---
 .../java/org/apache/hadoop/hbase/regionserver/Segment.java   | 4 ++--
 .../org/apache/hadoop/hbase/regionserver/SegmentScanner.java | 3 ++-
 5 files changed, 13 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2f14a054/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
index c73e5f5..0b07fe3 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompositeImmutableSegment.java
@@ -18,12 +18,12 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.logging.Log;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.TimeRange;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 
 import java.util.ArrayList;
 import java.util.Iterator;
@@ -122,7 +122,7 @@ public class CompositeImmutableSegment extends ImmutableSegment {
   }
 
   @Override
-  public boolean shouldSeek(Scan scan, long oldestUnexpiredTS){
+  public boolean shouldSeek(TimeRange tr, long oldestUnexpiredTS){
 throw new IllegalStateException("Not supported by CompositeImmutableScanner");
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/2f14a054/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java
index 9d53c7c..bca5fe8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/ImmutableSegment.java
@@ -22,7 +22,6 @@ package org.apache.hadoop.hbase.regionserver;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.ClassSize;
-import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.TimeRange;
 
 import java.util.ArrayList;
@@ -79,8 +78,8 @@ public abstract class ImmutableSegment extends Segment {
 
   /  PUBLIC METHODS  /
   @Override
-  public boolean shouldSeek(Scan scan, long oldestUnexpiredTS) {
-return this.timeRange.includesTimeRange(scan.getTimeRange()) &&
+  public boolean shouldSeek(TimeRange tr, long oldestUnexpiredTS) {
+return this.timeRange.includesTimeRange(tr) &&
 this.timeRange.getMax() >= oldestUnexpiredTS;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/2f14a054/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java
index 0ec250a..8c09930 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MutableSegment.java
@@ -27,7 +27,7 @@ import