[17/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
new file mode 100644
index 000..864b7f1
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/AssignmentManager.java
@@ -0,0 +1,1709 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.master.assignment;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.stream.Collectors;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseIOException;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.PleaseHoldException;
+import org.apache.hadoop.hbase.RegionException;
+import org.apache.hadoop.hbase.RegionStateListener;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.TableState;
+import org.apache.hadoop.hbase.exceptions.UnexpectedStateException;
+import org.apache.hadoop.hbase.master.balancer.FavoredStochasticBalancer;
+import org.apache.hadoop.hbase.favored.FavoredNodesManager;
+import org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
+import org.apache.hadoop.hbase.master.AssignmentListener;
+import org.apache.hadoop.hbase.master.LoadBalancer;
+import org.apache.hadoop.hbase.master.MasterServices;
+import org.apache.hadoop.hbase.master.MetricsAssignmentManager;
+import org.apache.hadoop.hbase.master.NoSuchProcedureException;
+import org.apache.hadoop.hbase.master.RegionPlan;
+import org.apache.hadoop.hbase.master.RegionState;
+import org.apache.hadoop.hbase.master.RegionState.State;
+import org.apache.hadoop.hbase.master.ServerListener;
+import org.apache.hadoop.hbase.master.TableStateManager;
+import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
+import org.apache.hadoop.hbase.master.assignment.RegionStates.ServerState;
+import org.apache.hadoop.hbase.master.assignment.RegionStates.ServerStateNode;
+// TODO: why are they here?
+import org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureScheduler;
+import org.apache.hadoop.hbase.master.procedure.ProcedureSyncWait;
+import org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureEvent;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
+import org.apache.hadoop.hbase.procedure2.ProcedureInMemoryChore;
+import org.apache.hadoop.hbase.procedure2.util.StringUtils;
+import org.apache.hadoop.hbase.quotas.QuotaExceededException;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
+import 

[09/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
index 66c5abf..aef67bf 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase.client;
 
 import java.util.List;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -32,7 +33,6 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
-import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
 import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
 import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
 import org.apache.hadoop.hbase.testclassification.ClientTests;

http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
index 852c5cf..10f466d 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSplitOrMergeStatus.java
@@ -17,19 +17,24 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.JVMClusterUtil;
+import org.apache.hadoop.hbase.util.Threads;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Rule;
@@ -37,17 +42,9 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
 
-import java.io.IOException;
-import java.util.List;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
 @Category({MediumTests.class, ClientTests.class})
 public class TestSplitOrMergeStatus {
 
-  private static final Log LOG = 
LogFactory.getLog(TestSplitOrMergeStatus.class);
   private final static HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();
   private static byte [] FAMILY = Bytes.toBytes("testFamily");
 
@@ -77,7 +74,7 @@ public class TestSplitOrMergeStatus {
 TEST_UTIL.loadTable(t, FAMILY, false);
 
 RegionLocator locator = 
TEST_UTIL.getConnection().getRegionLocator(t.getName());
-int orignalCount = locator.getAllRegionLocations().size();
+int originalCount = locator.getAllRegionLocations().size();
 
 Admin admin = TEST_UTIL.getAdmin();
 initSwitchStatus(admin);
@@ -85,14 +82,17 @@ public class TestSplitOrMergeStatus {
 assertEquals(results.length, 1);
 assertTrue(results[0]);
 admin.split(t.getName());
-int count = waitOnSplitOrMerge(t).size();
-assertTrue(orignalCount == count);
+int count = admin.getTableRegions(tableName).size();
+assertTrue(originalCount == count);
 results = admin.setSplitOrMergeEnabled(true, false, 
MasterSwitchType.SPLIT);
 assertEquals(results.length, 1);
 assertFalse(results[0]);
 admin.split(t.getName());
-count = waitOnSplitOrMerge(t).size();
-assertTrue(orignalCount

[27/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java
--
diff --git 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java
 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java
index 06a4e01..e83a7ac 100644
--- 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java
+++ 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AccessControlProtos.java
@@ -1024,7 +1024,7 @@ public final class AccessControlProtos {
* optional .hbase.pb.GlobalPermission global_permission = 
2;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermissionOrBuilder>
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermission.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GlobalPermissionOrBuilder>
 
   getGlobalPermissionFieldBuilder() {
 if (globalPermissionBuilder_ == null) {
   globalPermissionBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -1142,7 +1142,7 @@ public final class AccessControlProtos {
* optional .hbase.pb.NamespacePermission namespace_permission = 
3;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermissionOrBuilder>
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermission.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.NamespacePermissionOrBuilder>
 
   getNamespacePermissionFieldBuilder() {
 if (namespacePermissionBuilder_ == null) {
   namespacePermissionBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -1260,7 +1260,7 @@ public final class AccessControlProtos {
* optional .hbase.pb.TablePermission table_permission = 4;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermissionOrBuilder>
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermission.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.TablePermissionOrBuilder>
 
   getTablePermissionFieldBuilder() {
 if (tablePermissionBuilder_ == null) {
   tablePermissionBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -2074,7 +2074,7 @@ public final class AccessControlProtos {
* optional .hbase.pb.TableName table_name = 1;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
 
   getTableNameFieldBuilder() {
 if (tableNameBuilder_ == null) {
   tableNameBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -4130,7 +4130,7 @@ public final class AccessControlProtos {
* required .hbase.pb.Permission permission = 3;
*/
   private 

[19/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/main/java/org/apache/hadoop/hbase/master/BulkAssigner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/BulkAssigner.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/BulkAssigner.java
deleted file mode 100644
index 929cd4e..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/BulkAssigner.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.master;
-
-import java.io.IOException;
-import java.lang.Thread.UncaughtExceptionHandler;
-import java.util.concurrent.Executors;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Server;
-
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
-
-/**
- * Base class used bulk assigning and unassigning regions.
- * Encapsulates a fixed size thread pool of executors to run 
assignment/unassignment.
- * Implement {@link #populatePool(java.util.concurrent.ExecutorService)} and
- * {@link #waitUntilDone(long)}.  The default implementation of
- * the {@link #getUncaughtExceptionHandler()} is to abort the hosting
- * Server.
- */
-@InterfaceAudience.Private
-public abstract class BulkAssigner {
-  protected final Server server;
-
-  /**
-   * @param server An instance of Server
-   */
-  public BulkAssigner(final Server server) {
-this.server = server;
-  }
-
-  /**
-   * @return What to use for a thread prefix when executor runs.
-   */
-  protected String getThreadNamePrefix() {
-return this.server.getServerName() + "-" + this.getClass().getName(); 
-  }
-
-  protected UncaughtExceptionHandler getUncaughtExceptionHandler() {
-return new UncaughtExceptionHandler() {
-  @Override
-  public void uncaughtException(Thread t, Throwable e) {
-// Abort if exception of any kind.
-server.abort("Uncaught exception in " + t.getName(), e);
-  }
-};
-  }
-
-  protected int getThreadCount() {
-return this.server.getConfiguration().
-  getInt("hbase.bulk.assignment.threadpool.size", 20);
-  }
-
-  protected long getTimeoutOnRIT() {
-return this.server.getConfiguration().
-  getLong("hbase.bulk.assignment.waiton.empty.rit", 5 * 60 * 1000);
-  }
-
-  protected abstract void populatePool(
-  final java.util.concurrent.ExecutorService pool) throws IOException;
-
-  public boolean bulkAssign() throws InterruptedException, IOException {
-return bulkAssign(true);
-  }
-
-  /**
-   * Run the bulk assign.
-   * 
-   * @param sync
-   *  Whether to assign synchronously.
-   * @throws InterruptedException
-   * @return True if done.
-   * @throws IOException
-   */
-  public boolean bulkAssign(boolean sync) throws InterruptedException,
-  IOException {
-boolean result = false;
-ThreadFactoryBuilder builder = new ThreadFactoryBuilder();
-builder.setDaemon(true);
-builder.setNameFormat(getThreadNamePrefix() + "-%1$d");
-builder.setUncaughtExceptionHandler(getUncaughtExceptionHandler());
-int threadCount = getThreadCount();
-java.util.concurrent.ExecutorService pool =
-  Executors.newFixedThreadPool(threadCount, builder.build());
-try {
-  populatePool(pool);
-  // How long to wait on empty regions-in-transition.  If we timeout, the
-  // RIT monitor should do fixup.
-  if (sync) result = waitUntilDone(getTimeoutOnRIT());
-} finally {
-  // We're done with the pool.  It'll exit when its done all in queue.
-  pool.shutdown();
-}
-return result;
-  }
-
-  /**
-   * Wait until bulk assign is done.
-   * @param timeout How long to wait.
-   * @throws InterruptedException
-   * @return True if the condition we were waiting on happened.
-   */
-  protected abstract boolean waitUntilDone(final long timeout)
-  throws InterruptedException;
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/main/java/org/apache/hadoop/hbase/master/BulkReOpen.java
--
diff --git 

[25/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
--
diff --git 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
index d7bbd05..0ec9b22 100644
--- 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
+++ 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProcedureProtos.java
@@ -1734,100 +1734,201 @@ public final class MasterProcedureProtos {
   }
 
   /**
-   * Protobuf enum {@code hbase.pb.MergeTableRegionsState}
+   * Protobuf enum {@code hbase.pb.DispatchMergingRegionsState}
*/
-  public enum MergeTableRegionsState
+  public enum DispatchMergingRegionsState
   implements 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum {
 /**
- * MERGE_TABLE_REGIONS_PREPARE = 1;
+ * DISPATCH_MERGING_REGIONS_PREPARE = 1;
  */
-MERGE_TABLE_REGIONS_PREPARE(1),
+DISPATCH_MERGING_REGIONS_PREPARE(1),
 /**
- * MERGE_TABLE_REGIONS_MOVE_REGION_TO_SAME_RS = 2;
+ * DISPATCH_MERGING_REGIONS_PRE_OPERATION = 2;
  */
-MERGE_TABLE_REGIONS_MOVE_REGION_TO_SAME_RS(2),
+DISPATCH_MERGING_REGIONS_PRE_OPERATION(2),
 /**
- * MERGE_TABLE_REGIONS_PRE_MERGE_OPERATION = 3;
+ * DISPATCH_MERGING_REGIONS_MOVE_REGION_TO_SAME_RS = 3;
  */
-MERGE_TABLE_REGIONS_PRE_MERGE_OPERATION(3),
+DISPATCH_MERGING_REGIONS_MOVE_REGION_TO_SAME_RS(3),
 /**
- * MERGE_TABLE_REGIONS_SET_MERGING_TABLE_STATE = 4;
+ * DISPATCH_MERGING_REGIONS_DO_MERGE_IN_RS = 4;
  */
-MERGE_TABLE_REGIONS_SET_MERGING_TABLE_STATE(4),
+DISPATCH_MERGING_REGIONS_DO_MERGE_IN_RS(4),
 /**
- * MERGE_TABLE_REGIONS_CLOSE_REGIONS = 5;
+ * DISPATCH_MERGING_REGIONS_POST_OPERATION = 5;
  */
-MERGE_TABLE_REGIONS_CLOSE_REGIONS(5),
+DISPATCH_MERGING_REGIONS_POST_OPERATION(5),
+;
+
 /**
- * MERGE_TABLE_REGIONS_CREATE_MERGED_REGION = 6;
+ * DISPATCH_MERGING_REGIONS_PREPARE = 1;
  */
-MERGE_TABLE_REGIONS_CREATE_MERGED_REGION(6),
+public static final int DISPATCH_MERGING_REGIONS_PREPARE_VALUE = 1;
 /**
- * MERGE_TABLE_REGIONS_PRE_MERGE_COMMIT_OPERATION = 7;
+ * DISPATCH_MERGING_REGIONS_PRE_OPERATION = 2;
  */
-MERGE_TABLE_REGIONS_PRE_MERGE_COMMIT_OPERATION(7),
+public static final int DISPATCH_MERGING_REGIONS_PRE_OPERATION_VALUE = 2;
 /**
- * MERGE_TABLE_REGIONS_UPDATE_META = 8;
+ * DISPATCH_MERGING_REGIONS_MOVE_REGION_TO_SAME_RS = 3;
  */
-MERGE_TABLE_REGIONS_UPDATE_META(8),
+public static final int 
DISPATCH_MERGING_REGIONS_MOVE_REGION_TO_SAME_RS_VALUE = 3;
 /**
- * MERGE_TABLE_REGIONS_POST_MERGE_COMMIT_OPERATION = 9;
+ * DISPATCH_MERGING_REGIONS_DO_MERGE_IN_RS = 4;
  */
-MERGE_TABLE_REGIONS_POST_MERGE_COMMIT_OPERATION(9),
+public static final int DISPATCH_MERGING_REGIONS_DO_MERGE_IN_RS_VALUE = 4;
 /**
- * MERGE_TABLE_REGIONS_OPEN_MERGED_REGION = 10;
+ * DISPATCH_MERGING_REGIONS_POST_OPERATION = 5;
  */
-MERGE_TABLE_REGIONS_OPEN_MERGED_REGION(10),
+public static final int DISPATCH_MERGING_REGIONS_POST_OPERATION_VALUE = 5;
+
+
+public final int getNumber() {
+  return value;
+}
+
 /**
- * MERGE_TABLE_REGIONS_POST_OPERATION = 11;
+ * @deprecated Use {@link #forNumber(int)} instead.
  */
-MERGE_TABLE_REGIONS_POST_OPERATION(11),
-;
+@java.lang.Deprecated
+public static DispatchMergingRegionsState valueOf(int value) {
+  return forNumber(value);
+}
+
+public static DispatchMergingRegionsState forNumber(int value) {
+  switch (value) {
+case 1: return DISPATCH_MERGING_REGIONS_PREPARE;
+case 2: return DISPATCH_MERGING_REGIONS_PRE_OPERATION;
+case 3: return DISPATCH_MERGING_REGIONS_MOVE_REGION_TO_SAME_RS;
+case 4: return DISPATCH_MERGING_REGIONS_DO_MERGE_IN_RS;
+case 5: return DISPATCH_MERGING_REGIONS_POST_OPERATION;
+default: return null;
+  }
+}
+
+public static 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap
+internalGetValueMap() {
+  return internalValueMap;
+}
+private static final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<
+DispatchMergingRegionsState> internalValueMap =
+  new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap()
 {
+public DispatchMergingRegionsState findValueByNumber(int number) {
+  return DispatchMergingRegionsState.forNumber(number);
+}
+  };
+
+public final 

[28/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
--
diff --git 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
index 1bb6118..b648cf2 100644
--- 
a/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
+++ 
b/hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
@@ -32,6 +32,8 @@ import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.DelayQueue;
@@ -113,9 +115,11 @@ public class ProcedureExecutor {
* Internal cleaner that removes the completed procedure results after a TTL.
* NOTE: This is a special case handled in timeoutLoop().
*
-   * Since the client code looks more or less like:
+   * Since the client code looks more or less like:
+   * 
*   procId = master.doOperation()
*   while (master.getProcResult(procId) == ProcInProgress);
+   * 
* The master should not throw away the proc result as soon as the procedure 
is done
* but should wait a result request from the client (see 
executor.removeResult(procId))
* The client will call something like master.isProcDone() or 
master.getProcResult()
@@ -480,10 +484,10 @@ public class ProcedureExecutor {
 // We have numThreads executor + one timer thread used for timing out
 // procedures and triggering periodic procedures.
 this.corePoolSize = numThreads;
-LOG.info("Starting executor worker threads=" + corePoolSize);
+LOG.info("Starting ProcedureExecutor Worker threads (ProcExecWrkr)=" + 
corePoolSize);
 
 // Create the Thread Group for the executors
-threadGroup = new ThreadGroup("ProcedureExecutor");
+threadGroup = new ThreadGroup("ProcExecThrdGrp");
 
 // Create the timeout executor
 timeoutExecutor = new TimeoutExecutorThread(threadGroup);
@@ -1077,13 +1081,16 @@ public class ProcedureExecutor {
 final Long rootProcId = getRootProcedureId(proc);
 if (rootProcId == null) {
   // The 'proc' was ready to run but the root procedure was rolledback
+  LOG.warn("Rollback because parent is done/rolledback proc=" + proc);
   executeRollback(proc);
   return;
 }
 
 final RootProcedureState procStack = rollbackStack.get(rootProcId);
-if (procStack == null) return;
-
+if (procStack == null) {
+  LOG.warn("RootProcedureState is null for " + proc.getProcId());
+  return;
+}
 do {
   // Try to acquire the execution
   if (!procStack.acquire(proc)) {
@@ -1097,6 +1104,7 @@ public class ProcedureExecutor {
   scheduler.yield(proc);
   break;
 case LOCK_EVENT_WAIT:
+  LOG.info("LOCK_EVENT_WAIT rollback..." + proc);
   procStack.unsetRollback();
   break;
 default:
@@ -1114,6 +1122,7 @@ public class ProcedureExecutor {
 scheduler.yield(proc);
 break;
   case LOCK_EVENT_WAIT:
+LOG.info("LOCK_EVENT_WAIT can't rollback child running?..." + 
proc);
 break;
   default:
 throw new UnsupportedOperationException();
@@ -1125,16 +1134,21 @@ public class ProcedureExecutor {
 
   // Execute the procedure
   assert proc.getState() == ProcedureState.RUNNABLE : proc;
-  switch (acquireLock(proc)) {
+  // Note that lock is NOT about concurrency but rather about ensuring
+  // ownership of a procedure of an entity such as a region or table
+  LockState lockState = acquireLock(proc);
+  switch (lockState) {
 case LOCK_ACQUIRED:
   execProcedure(procStack, proc);
   releaseLock(proc, false);
   break;
 case LOCK_YIELD_WAIT:
+  LOG.info(lockState + " " + proc);
   scheduler.yield(proc);
   break;
 case LOCK_EVENT_WAIT:
-  // someone will wake us up when the lock is available
+  // Someone will wake us up when the lock is available
+  LOG.debug(lockState + " " + proc);
   break;
 default:
   throw new UnsupportedOperationException();
@@ -1150,10 +1164,7 @@ public class ProcedureExecutor {
   if (proc.isSuccess()) {
 // update metrics on finishing the procedure
 proc.updateMetricsOnFinish(getEnvironment(), proc.elapsedTime(), true);
-
-if (LOG.isDebugEnabled()) {
-  LOG.debug("Finished " + proc + " in " + 

[26/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
--
diff --git 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
index 711b9c8..812cf3b 100644
--- 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
+++ 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/AdminProtos.java
@@ -728,6 +728,40 @@ public final class AdminProtos {
  * optional bool isRecovering = 3;
  */
 boolean getIsRecovering();
+
+/**
+ * 
+ * True if region is splittable, false otherwise.
+ * 
+ *
+ * optional bool splittable = 4;
+ */
+boolean hasSplittable();
+/**
+ * 
+ * True if region is splittable, false otherwise.
+ * 
+ *
+ * optional bool splittable = 4;
+ */
+boolean getSplittable();
+
+/**
+ * 
+ * True if region is mergeable, false otherwise.
+ * 
+ *
+ * optional bool mergeable = 5;
+ */
+boolean hasMergeable();
+/**
+ * 
+ * True if region is mergeable, false otherwise.
+ * 
+ *
+ * optional bool mergeable = 5;
+ */
+boolean getMergeable();
   }
   /**
* Protobuf type {@code hbase.pb.GetRegionInfoResponse}
@@ -743,6 +777,8 @@ public final class AdminProtos {
 private GetRegionInfoResponse() {
   compactionState_ = 0;
   isRecovering_ = false;
+  splittable_ = false;
+  mergeable_ = false;
 }
 
 @java.lang.Override
@@ -802,6 +838,16 @@ public final class AdminProtos {
   isRecovering_ = input.readBool();
   break;
 }
+case 32: {
+  bitField0_ |= 0x0008;
+  splittable_ = input.readBool();
+  break;
+}
+case 40: {
+  bitField0_ |= 0x0010;
+  mergeable_ = input.readBool();
+  break;
+}
   }
 }
   } catch 
(org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException
 e) {
@@ -987,6 +1033,52 @@ public final class AdminProtos {
   return isRecovering_;
 }
 
+public static final int SPLITTABLE_FIELD_NUMBER = 4;
+private boolean splittable_;
+/**
+ * 
+ * True if region is splittable, false otherwise.
+ * 
+ *
+ * optional bool splittable = 4;
+ */
+public boolean hasSplittable() {
+  return ((bitField0_ & 0x0008) == 0x0008);
+}
+/**
+ * 
+ * True if region is splittable, false otherwise.
+ * 
+ *
+ * optional bool splittable = 4;
+ */
+public boolean getSplittable() {
+  return splittable_;
+}
+
+public static final int MERGEABLE_FIELD_NUMBER = 5;
+private boolean mergeable_;
+/**
+ * 
+ * True if region is mergeable, false otherwise.
+ * 
+ *
+ * optional bool mergeable = 5;
+ */
+public boolean hasMergeable() {
+  return ((bitField0_ & 0x0010) == 0x0010);
+}
+/**
+ * 
+ * True if region is mergeable, false otherwise.
+ * 
+ *
+ * optional bool mergeable = 5;
+ */
+public boolean getMergeable() {
+  return mergeable_;
+}
+
 private byte memoizedIsInitialized = -1;
 public final boolean isInitialized() {
   byte isInitialized = memoizedIsInitialized;
@@ -1016,6 +1108,12 @@ public final class AdminProtos {
   if (((bitField0_ & 0x0004) == 0x0004)) {
 output.writeBool(3, isRecovering_);
   }
+  if (((bitField0_ & 0x0008) == 0x0008)) {
+output.writeBool(4, splittable_);
+  }
+  if (((bitField0_ & 0x0010) == 0x0010)) {
+output.writeBool(5, mergeable_);
+  }
   unknownFields.writeTo(output);
 }
 
@@ -1036,6 +1134,14 @@ public final class AdminProtos {
 size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
   .computeBoolSize(3, isRecovering_);
   }
+  if (((bitField0_ & 0x0008) == 0x0008)) {
+size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+  .computeBoolSize(4, splittable_);
+  }
+  if (((bitField0_ & 0x0010) == 0x0010)) {
+size += 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+  .computeBoolSize(5, mergeable_);
+  }
   size += unknownFields.getSerializedSize();
   memoizedSize = size;
   return size;
@@ -1067,6 +1173,16 @@ public final class AdminProtos {
 result = result && (getIsRecovering()
 == other.getIsRecovering());
   }
+  result = result && 

[20/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
deleted file mode 100644
index 69ebd97..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
+++ /dev/null
@@ -1,3053 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.master;
-
-import com.google.common.annotations.VisibleForTesting;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.Random;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.ScheduledThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantLock;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.CoordinatedStateException;
-import org.apache.hadoop.hbase.HBaseIOException;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.MetaTableAccessor;
-import org.apache.hadoop.hbase.NotServingRegionException;
-import org.apache.hadoop.hbase.RegionLocations;
-import org.apache.hadoop.hbase.RegionStateListener;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.MasterSwitchType;
-import org.apache.hadoop.hbase.client.RegionReplicaUtil;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.TableState;
-import org.apache.hadoop.hbase.executor.EventHandler;
-import org.apache.hadoop.hbase.executor.EventType;
-import org.apache.hadoop.hbase.executor.ExecutorService;
-import org.apache.hadoop.hbase.favored.FavoredNodesManager;
-import org.apache.hadoop.hbase.favored.FavoredNodesPromoter;
-import org.apache.hadoop.hbase.ipc.FailedServerException;
-import org.apache.hadoop.hbase.ipc.RpcClient;
-import org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-import org.apache.hadoop.hbase.master.RegionState.State;
-import org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
-import org.apache.hadoop.hbase.quotas.QuotaExceededException;
-import org.apache.hadoop.hbase.regionserver.RegionOpeningState;
-import org.apache.hadoop.hbase.regionserver.RegionServerAbortedException;
-import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.hbase.util.KeyLocker;
-import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.hbase.util.PairOfSameType;
-import org.apache.hadoop.hbase.util.RetryCounter;
-import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
-import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-import 

[21/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java
--
diff --git 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java
 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java
index 454e3bc..4d5953c 100644
--- 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java
+++ 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/SnapshotProtos.java
@@ -357,7 +357,7 @@ public final class SnapshotProtos {
   if (ref instanceof java.lang.String) {
 return (java.lang.String) ref;
   } else {
-org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
 (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) 
ref;
 java.lang.String s = bs.toStringUtf8();
 if (bs.isValidUtf8()) {
@@ -373,7 +373,7 @@ public final class SnapshotProtos {
 getNameBytes() {
   java.lang.Object ref = name_;
   if (ref instanceof java.lang.String) {
-org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
 (java.lang.String) ref);
 name_ = b;
@@ -407,7 +407,7 @@ public final class SnapshotProtos {
   if (ref instanceof java.lang.String) {
 return (java.lang.String) ref;
   } else {
-org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
 (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) 
ref;
 java.lang.String s = bs.toStringUtf8();
 if (bs.isValidUtf8()) {
@@ -427,7 +427,7 @@ public final class SnapshotProtos {
 getTableBytes() {
   java.lang.Object ref = table_;
   if (ref instanceof java.lang.String) {
-org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
 (java.lang.String) ref);
 table_ = b;
@@ -499,7 +499,7 @@ public final class SnapshotProtos {
   if (ref instanceof java.lang.String) {
 return (java.lang.String) ref;
   } else {
-org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = 
 (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) 
ref;
 java.lang.String s = bs.toStringUtf8();
 if (bs.isValidUtf8()) {
@@ -515,7 +515,7 @@ public final class SnapshotProtos {
 getOwnerBytes() {
   java.lang.Object ref = owner_;
   if (ref instanceof java.lang.String) {
-org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
 (java.lang.String) ref);
 owner_ = b;
@@ -1047,7 +1047,7 @@ public final class SnapshotProtos {
   getNameBytes() {
 java.lang.Object ref = name_;
 if (ref instanceof String) {
-  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
   
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
   (java.lang.String) ref);
   name_ = b;
@@ -1135,7 +1135,7 @@ public final class SnapshotProtos {
   getTableBytes() {
 java.lang.Object ref = table_;
 if (ref instanceof String) {
-  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
   
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
   (java.lang.String) ref);
   table_ = b;
@@ -1323,7 +1323,7 @@ public final class SnapshotProtos {
   getOwnerBytes() {
 java.lang.Object ref = owner_;
 if (ref instanceof String) {
-  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+  org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b = 
   
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
   (java.lang.String) ref);
   owner_ = b;
@@ 

[29/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a 
new AssignmentManager, one that describes Assignment using a State Machine 
built on top of ProcedureV2 facility.

This doc. keeps state on where we are at w/ the new AM:
https://docs.google.com/document/d/1eVKa7FHdeoJ1-9o8yZcOTAQbv0u0bblBlCCzVSIn69g/edit#heading=h.vfdoxqut9lqn
Includes list of tests disabled by this patch with reasons why.

Based on patches from Matteo's repository, then fixed up to get it all to pass 
cluster
tests, filling in some missing functionality, fix of findbugs, fixing bugs, 
etc..
including:

1. HBASE-14616 Procedure v2 - Replace the old AM with the new AM.
The basis comes from Matteo's repo here:
https://github.com/matteobertozzi/hbase/commit/689227fcbfe8e6588433dbcdabf4526e3d478b2e

Patch replaces old AM with the new under subpackage master.assignment.
Mostly just updating classes to use new AM -- import changes -- rather
than the old. It also removes old AM and supporting classes.
See below for more detail.

2. HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi)
https://github.com/matteobertozzi/hbase/commit/3622cba4e331d2fc7bfc1932abb4c9cbf5802efa

Adds running of remote procedure. Adds batching of remote calls.
Adds support for assign/unassign in procedures. Adds version info
reporting in rpc. Adds start of an AMv2.

3. Reporting of remote RS version is from here:
https://github.com/matteobertozzi/hbase/commit/ddb4df3964e8298c88c0210e83493aa91ac0942d.patch

4. And remote dispatch of procedures is from:
https://github.com/matteobertozzi/hbase/commit/186b9e7c4dae61a79509a6c3aad7f80ec61345e5

5. The split merge patches from here are also melded in:
https://github.com/matteobertozzi/hbase/commit/9a3a95a2c2974842a4849d1ad867e70764e7f707
and 
https://github.com/matteobertozzi/hbase/commit/d6289307a02a777299f65238238a2a8af3253067

We add testing util for new AM and new sets of tests.

Does a bunch of fixup on logging so its possible to follow a procedures' 
narrative by grepping
procedure id. We spewed loads of log too on big transitions such as master 
fail; fixed.

Fix CatalogTracker. Make it use Procedures doing clean up of Region data on 
split/merge.
Without these changes, ITBLL was failing at larger scale (3-4hours 5B rows) 
because, among other things, we were
splitting already-split Regions (the CatalogJanitor would run but wasn't
taking a lock on Regions, so havoc ensued).

Added a bunch of doc. on Procedure primitives.

Added new region-based state machine base class. Moved region-based
state machines on to it.

Found bugs in the way procedure locking was being done in a few of the
region-based Procedures. Having them all share the same subclass helps here.

Added isSplittable and isMergeable to the Region Interface.

Master would split/merge even though the Regions still had
references. Fixed it so Master asks RegionServer if Region
is splittable.

Messing more w/ logging. Made all procedures log the same and report
the state the same; helps when logging is regular.

Rewrote TestCatalogTracker. Enabled TestMergeTableRegionProcedure.

Added more functionality to MockMasterServices so can use it doing
standalone testing of Procedures (made TestCatalogTracker use it
instead of its own version).

Add to MasterServices ability to wait on Master being up -- makes
it so can Mock Master and start to implement standalone split testing.
Start in on a Split region standalone test in TestAM.

Fix bug where a Split can fail because it comes in in the middle of
a Move (by holding lock for duration of a Move).

Breaks CPs that were watching merge/split. These are run by Master now
so you need to observe on Master, not on RegionServer.

Details:

M hbase-client/src/main/java/org/apache/hadoop/hbase/ClusterStatus.java
Takes List of regionstates on construction rather than a Set.
NOTE! This is a change in a public class.

M hbase-client/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
Add utility getShortNameToLog

M 
hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
M 
hbase-client/src/main/java/org/apache/hadoop/hbase/client/ShortCircuitMasterConnection.java
Add support for dispatching assign, split and merge processes.

M hbase-client/src/main/java/org/apache/hadoop/hbase/master/RegionState.java
Purge old overlapping states: PENDING_OPEN, PENDING_CLOSE, etc.

M 
hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/Procedure.java
Lots of doc on its inner workings. Bug fixes.

M 
hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/ProcedureExecutor.java
Log and doc on workings. Bug fixes.

A 
hbase-procedure/src/main/java/org/apache/hadoop/hbase/procedure2/RemoteProcedureDispatcher.java
Dispatch remote procedures every 150ms or 32 items -- whichever
happens first (configurable). Runs a timeout thread. This facility is
not on yet; will come in as part of a later fix. Currently works a
region at a time. This class carries notion of a remote procedure and of a 

[14/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
index ced7abc..c3900dd 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/CreateTableProcedure.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.client.TableState;
-import org.apache.hadoop.hbase.master.AssignmentManager;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
 import org.apache.hadoop.hbase.master.MasterFileSystem;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -107,10 +106,12 @@ public class CreateTableProcedure
   setNextState(CreateTableState.CREATE_TABLE_ASSIGN_REGIONS);
   break;
 case CREATE_TABLE_ASSIGN_REGIONS:
-  assignRegions(env, getTableName(), newRegions);
+  setEnablingState(env, getTableName());
+  
addChildProcedure(env.getAssignmentManager().createAssignProcedures(newRegions));
   setNextState(CreateTableState.CREATE_TABLE_UPDATE_DESC_CACHE);
   break;
 case CREATE_TABLE_UPDATE_DESC_CACHE:
+  setEnabledState(env, getTableName());
   updateTableDescCache(env, getTableName());
   setNextState(CreateTableState.CREATE_TABLE_POST_OPERATION);
   break;
@@ -333,21 +334,21 @@ public class CreateTableProcedure
   protected static List addTableToMeta(final MasterProcedureEnv 
env,
   final HTableDescriptor hTableDescriptor,
   final List regions) throws IOException {
-if (regions != null && regions.size() > 0) {
-  ProcedureSyncWait.waitMetaRegions(env);
+assert (regions != null && regions.size() > 0) : "expected at least 1 
region, got " + regions;
 
-  // Add regions to META
-  addRegionsToMeta(env, hTableDescriptor, regions);
-  // Add replicas if needed
-  List newRegions = addReplicas(env, hTableDescriptor, 
regions);
+ProcedureSyncWait.waitMetaRegions(env);
 
-  // Setup replication for region replicas if needed
-  if (hTableDescriptor.getRegionReplication() > 1) {
-
ServerRegionReplicaUtil.setupRegionReplicaReplication(env.getMasterConfiguration());
-  }
-  return newRegions;
+// Add replicas if needed
+List newRegions = addReplicas(env, hTableDescriptor, regions);
+
+// Add regions to META
+addRegionsToMeta(env, hTableDescriptor, newRegions);
+
+// Setup replication for region replicas if needed
+if (hTableDescriptor.getRegionReplication() > 1) {
+  
ServerRegionReplicaUtil.setupRegionReplicaReplication(env.getMasterConfiguration());
 }
-return regions;
+return newRegions;
   }
 
   /**
@@ -374,18 +375,16 @@ public class CreateTableProcedure
 return hRegionInfos;
   }
 
-  protected static void assignRegions(final MasterProcedureEnv env,
-  final TableName tableName, final List regions) throws 
IOException {
-ProcedureSyncWait.waitRegionServers(env);
 
+  protected static void setEnablingState(final MasterProcedureEnv env, final 
TableName tableName)
+  throws IOException {
 // Mark the table as Enabling
 env.getMasterServices().getTableStateManager()
   .setTableState(tableName, TableState.State.ENABLING);
+  }
 
-// Trigger immediate assignment of the regions in round-robin fashion
-final AssignmentManager assignmentManager = 
env.getMasterServices().getAssignmentManager();
-ModifyRegionUtils.assignRegions(assignmentManager, regions);
-
+  protected static void setEnabledState(final MasterProcedureEnv env, final 
TableName tableName)
+  throws IOException {
 // Enable table
 env.getMasterServices().getTableStateManager()
   .setTableState(tableName, TableState.State.ENABLED);

http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
index 096172a..78bd715 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
@@ -30,7 

[05/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java
index c5c6484..8872c63 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestModifyTableProcedure.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -30,18 +31,19 @@ import org.apache.hadoop.hbase.ProcedureInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
 import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ModifyTableState;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
+import org.junit.rules.TestRule;
 
 @Category({MasterTests.class, MediumTests.class})
 public class TestModifyTableProcedure extends TestTableDDLProcedureBase {
-  @Rule
-  public TestName name = new TestName();
+  @Rule public final TestRule timeout = 
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
+  withLookingForStuckThread(true).build();
+  @Rule public TestName name = new TestName();
 
   @Test(timeout=6)
   public void testModifyTable() throws Exception {
@@ -208,8 +210,7 @@ public class TestModifyTableProcedure extends 
TestTableDDLProcedureBase {
   new ModifyTableProcedure(procExec.getEnvironment(), htd));
 
 // Restart the executor and execute the step twice
-int numberOfSteps = ModifyTableState.values().length;
-MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec, 
procId, numberOfSteps);
+MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec, 
procId);
 
 // Validate descriptor
 HTableDescriptor currentHtd = 
UTIL.getAdmin().getTableDescriptor(tableName);
@@ -246,8 +247,7 @@ public class TestModifyTableProcedure extends 
TestTableDDLProcedureBase {
   new ModifyTableProcedure(procExec.getEnvironment(), htd));
 
 // Restart the executor and execute the step twice
-int numberOfSteps = ModifyTableState.values().length;
-MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec, 
procId, numberOfSteps);
+MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec, 
procId);
 
 // Validate descriptor
 HTableDescriptor currentHtd = 
UTIL.getAdmin().getTableDescriptor(tableName);
@@ -282,7 +282,7 @@ public class TestModifyTableProcedure extends 
TestTableDDLProcedureBase {
 long procId = procExec.submitProcedure(
   new ModifyTableProcedure(procExec.getEnvironment(), htd));
 
-int numberOfSteps = 1; // failing at pre operation
+int numberOfSteps = 0; // failing at pre operation
 MasterProcedureTestingUtility.testRollbackAndDoubleExecution(procExec, 
procId, numberOfSteps);
 
 // cf2 should not be present
@@ -315,7 +315,7 @@ public class TestModifyTableProcedure extends 
TestTableDDLProcedureBase {
   new ModifyTableProcedure(procExec.getEnvironment(), htd));
 
 // Restart the executor and rollback the step twice
-int numberOfSteps = 1; // failing at pre operation
+int numberOfSteps = 0; // failing at pre operation
 MasterProcedureTestingUtility.testRollbackAndDoubleExecution(procExec, 
procId, numberOfSteps);
 
 // cf2 should not be present

http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
index e6e90ef..47b1248 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestProcedureAdmin.java
@@ -18,12 +18,16 @@
 
 package org.apache.hadoop.hbase.master.procedure;
 
-import java.util.Random;
+import static 

[18/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionStateStore.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionStateStore.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionStateStore.java
deleted file mode 100644
index 3a2a6d7..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/RegionStateStore.java
+++ /dev/null
@@ -1,268 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.master;
-
-import com.google.common.base.Preconditions;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.MetaTableAccessor;
-import org.apache.hadoop.hbase.RegionLocations;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.master.RegionState.State;
-import org.apache.hadoop.hbase.regionserver.Region;
-import org.apache.hadoop.hbase.regionserver.RegionServerServices;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.MultiHConnection;
-import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-import org.apache.zookeeper.KeeperException;
-
-/**
- * A helper to persist region state in meta. We may change this class
- * to StateStore later if we also use it to store other states in meta
- */
-@InterfaceAudience.Private
-public class RegionStateStore {
-  private static final Log LOG = LogFactory.getLog(RegionStateStore.class);
-
-  /** The delimiter for meta columns for replicaIds > 0 */
-  protected static final char META_REPLICA_ID_DELIMITER = '_';
-
-  private volatile Region metaRegion;
-  private volatile boolean initialized;
-  private MultiHConnection multiHConnection;
-  private final MasterServices server;
-
-  /**
-   * Returns the {@link ServerName} from catalog table {@link Result}
-   * where the region is transitioning. It should be the same as
-   * {@link MetaTableAccessor#getServerName(Result,int)} if the server is at 
OPEN state.
-   * @param r Result to pull the transitioning server name from
-   * @return A ServerName instance or {@link 
MetaTableAccessor#getServerName(Result,int)}
-   * if necessary fields not found or empty.
-   */
-  static ServerName getRegionServer(final Result r, int replicaId) {
-Cell cell = r.getColumnLatestCell(HConstants.CATALOG_FAMILY, 
getServerNameColumn(replicaId));
-if (cell == null || cell.getValueLength() == 0) {
-  RegionLocations locations = MetaTableAccessor.getRegionLocations(r);
-  if (locations != null) {
-HRegionLocation location = locations.getRegionLocation(replicaId);
-if (location != null) {
-  return location.getServerName();
-}
-  }
-  return null;
-}
-return ServerName.parseServerName(Bytes.toString(cell.getValueArray(),
-  cell.getValueOffset(), cell.getValueLength()));
-  }
-
-  private static byte[] getServerNameColumn(int replicaId) {
-return replicaId == 0
-? HConstants.SERVERNAME_QUALIFIER
-: Bytes.toBytes(HConstants.SERVERNAME_QUALIFIER_STR + 
META_REPLICA_ID_DELIMITER
-  + String.format(HRegionInfo.REPLICA_ID_FORMAT, replicaId));
-  }
-
-  /**
-   * Pull the region state from a catalog table {@link Result}.
-   * @param r Result to pull the region state from
-   * @return the region state, or OPEN if there's no value written.
-   */
-  static State getRegionState(final Result r, int replicaId) {
- 

[06/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
new file mode 100644
index 000..6824597
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/TestSplitTableRegionProcedure.java
@@ -0,0 +1,428 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.master.assignment;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CategoryBasedTimeout;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Waiter;
+import org.apache.hadoop.hbase.client.CompactionState;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.master.assignment.SplitTableRegionProcedure;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureTestingUtility;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
+import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+import org.junit.rules.TestRule;
+
+@Category({MasterTests.class, MediumTests.class})
+public class TestSplitTableRegionProcedure {
+  private static final Log LOG = 
LogFactory.getLog(TestSplitTableRegionProcedure.class);
+  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().
+  withTimeout(this.getClass()).withLookingForStuckThread(true).build();
+
+  protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+
+  private static String ColumnFamilyName1 = "cf1";
+  private static String ColumnFamilyName2 = "cf2";
+
+  private static final int startRowNum = 11;
+  private static final int rowCount = 60;
+
+  @Rule
+  public TestName name = new TestName();
+
+  private static void setupConf(Configuration conf) {
+conf.setInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS, 1);
+conf.setLong(HConstants.MAJOR_COMPACTION_PERIOD, 0);
+  }
+
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+setupConf(UTIL.getConfiguration());
+UTIL.startMiniCluster(3);
+  }
+
+  @AfterClass
+  public static void cleanupTest() throws Exception {
+try {
+  UTIL.shutdownMiniCluster();
+} catch (Exception e) {
+  LOG.warn("failure shutting down cluster", e);
+}
+  }
+
+  @Before
+  public void setup() throws Exception {
+
ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(getMasterProcedureExecutor(),
 false);
+
+// Turn off balancer so it doesn't cut in and mess up our placements.
+

[08/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
index 32bce26..59e8fb3 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
@@ -27,8 +27,11 @@ import static org.mockito.Mockito.spy;
 
 import java.io.IOException;
 import java.util.Map;
+import java.util.NavigableMap;
 import java.util.SortedMap;
+import java.util.SortedSet;
 import java.util.TreeMap;
+import java.util.concurrent.ConcurrentSkipListMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -36,267 +39,141 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.CoordinatedStateManager;
+import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MetaMockingUtil;
 import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableDescriptors;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.ClusterConnection;
-import org.apache.hadoop.hbase.client.HConnectionTestingUtility;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.coordination.BaseCoordinatedStateManager;
-import org.apache.hadoop.hbase.coordination.SplitLogManagerCoordination;
-import 
org.apache.hadoop.hbase.coordination.SplitLogManagerCoordination.SplitLogManagerDetails;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.io.Reference;
 import 
org.apache.hadoop.hbase.master.CatalogJanitor.SplitParentFirstComparator;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException;
-import org.apache.hadoop.hbase.regionserver.HStore;
+import org.apache.hadoop.hbase.master.assignment.MockMasterServices;
+import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
 import org.apache.hadoop.hbase.regionserver.ChunkCreator;
+import org.apache.hadoop.hbase.regionserver.HStore;
 import org.apache.hadoop.hbase.regionserver.MemStoreLABImpl;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
-import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.HFileArchiveUtil;
+import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.util.Triple;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
-import org.mockito.Mockito;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.stubbing.Answer;
+import org.junit.rules.TestRule;
 
 @Category({MasterTests.class, SmallTests.class})
 public class TestCatalogJanitor {
   private static final Log LOG = LogFactory.getLog(TestCatalogJanitor.class);
-
-  @Rule
-  public TestName name = new TestName();
+  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().
+ withTimeout(this.getClass()).withLookingForStuckThread(true).build();
+  @Rule public final TestName name = new TestName();
+  private static final HBaseTestingUtility HTU = new HBaseTestingUtility();
+  private MockMasterServices masterServices;
+  private 

[12/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
index 2703947..71c6b89 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ServerCrashProcedure.java
@@ -19,55 +19,40 @@ package org.apache.hadoop.hbase.master.procedure;
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.InterruptedIOException;
 import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
-import java.util.Set;
-import java.util.concurrent.locks.Lock;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.client.ClusterConnection;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
-import org.apache.hadoop.hbase.client.TableState;
-import org.apache.hadoop.hbase.master.AssignmentManager;
 import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.master.MasterWalManager;
-import org.apache.hadoop.hbase.master.RegionState;
-import org.apache.hadoop.hbase.master.RegionStates;
+import org.apache.hadoop.hbase.master.assignment.AssignProcedure;
+import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
+import org.apache.hadoop.hbase.master.assignment.RegionTransitionProcedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
 import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
 import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.ServerCrashState;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode;
-import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
-import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.zookeeper.KeeperException;
 
 /**
  * Handle crashed server. This is a port to ProcedureV2 of what used to be 
euphemistically called
  * ServerShutdownHandler.
  *
- * The procedure flow varies dependent on whether meta is assigned, if we 
are
- * doing distributed log replay versus distributed log splitting, and if we 
are to split logs at
- * all.
- *
- * This procedure asks that all crashed servers get processed equally; we 
yield after the
- * completion of each successful flow step. We do this so that we do not 
'deadlock' waiting on
- * a region assignment so we can replay edits which could happen if a region 
moved there are edits
- * on two servers for replay.
+ * The procedure flow varies dependent on whether meta is assigned and if 
we are to split logs.
  *
- * TODO: ASSIGN and WAIT_ON_ASSIGN (at least) are not idempotent. Revisit 
when assign is pv2.
- * TODO: We do not have special handling for system tables.
+ * We come in here after ServerManager has noticed a server has expired. 
Procedures
+ * queued on the rpc should have been notified about fail and should be 
concurrently
+ * getting themselves ready to assign elsewhere.
  */
 public class ServerCrashProcedure
 extends StateMachineProcedure
@@ -75,36 +60,6 @@ implements ServerProcedureInterface {
   private static final Log LOG = LogFactory.getLog(ServerCrashProcedure.class);
 
   /**
-   * Configuration key to set how long to wait in ms doing a quick check on 
meta state.
-   */
-  public static final String KEY_SHORT_WAIT_ON_META =
-  "hbase.master.servercrash.short.wait.on.meta.ms";
-
-  public static final int DEFAULT_SHORT_WAIT_ON_META = 1000;
-
-  /**
-   * Configuration key to set how many retries to cycle before we give up on 
meta.
-   * Each attempt will wait at least {@link #KEY_SHORT_WAIT_ON_META} 
milliseconds.
-   */
-  public static final String KEY_RETRIES_ON_META =
-  "hbase.master.servercrash.meta.retries";
-
-  public static final int DEFAULT_RETRIES_ON_META = 10;
-
-  /**
-   * Configuration key to set how long to wait in ms on regions in 

[23/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
--
diff --git 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
index 2435564..1ccf488 100644
--- 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
+++ 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/QuotaProtos.java
@@ -2210,7 +2210,7 @@ public final class QuotaProtos {
* optional .hbase.pb.TimedQuota req_num = 1;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
 
   getReqNumFieldBuilder() {
 if (reqNumBuilder_ == null) {
   reqNumBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -2328,7 +2328,7 @@ public final class QuotaProtos {
* optional .hbase.pb.TimedQuota req_size = 2;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
 
   getReqSizeFieldBuilder() {
 if (reqSizeBuilder_ == null) {
   reqSizeBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -2446,7 +2446,7 @@ public final class QuotaProtos {
* optional .hbase.pb.TimedQuota write_num = 3;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
 
   getWriteNumFieldBuilder() {
 if (writeNumBuilder_ == null) {
   writeNumBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -2564,7 +2564,7 @@ public final class QuotaProtos {
* optional .hbase.pb.TimedQuota write_size = 4;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
 
   getWriteSizeFieldBuilder() {
 if (writeSizeBuilder_ == null) {
   writeSizeBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -2682,7 +2682,7 @@ public final class QuotaProtos {
* optional .hbase.pb.TimedQuota read_num = 5;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota, 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.TimedQuota.Builder,
 

[24/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
--
diff --git 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
index 798932e..5ea2044 100644
--- 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
+++ 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/MasterProtos.java
@@ -842,7 +842,7 @@ public final class MasterProtos {
* required .hbase.pb.TableName table_name = 1;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
 
   getTableNameFieldBuilder() {
 if (tableNameBuilder_ == null) {
   tableNameBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -960,7 +960,7 @@ public final class MasterProtos {
* required .hbase.pb.ColumnFamilySchema column_families = 
2;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>
 
   getColumnFamiliesFieldBuilder() {
 if (columnFamiliesBuilder_ == null) {
   columnFamiliesBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -2257,7 +2257,7 @@ public final class MasterProtos {
* required .hbase.pb.TableName table_name = 1;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
 
   getTableNameFieldBuilder() {
 if (tableNameBuilder_ == null) {
   tableNameBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -3622,7 +3622,7 @@ public final class MasterProtos {
* required .hbase.pb.TableName table_name = 1;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
 
   getTableNameFieldBuilder() {
 if (tableNameBuilder_ == null) {
   tableNameBuilder_ = new 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
@@ -3740,7 +3740,7 @@ public final class MasterProtos {
* required .hbase.pb.ColumnFamilySchema column_families = 
2;
*/
   private 
org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3<
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>
+  

[07/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
new file mode 100644
index 000..d558aaf
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
@@ -0,0 +1,358 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master.assignment;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.SortedSet;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.CoordinatedStateManager;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.ServerLoad;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableDescriptors;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.ClusterConnection;
+import org.apache.hadoop.hbase.client.HConnectionTestingUtility;
+import org.apache.hadoop.hbase.master.LoadBalancer;
+import org.apache.hadoop.hbase.master.MasterFileSystem;
+import org.apache.hadoop.hbase.master.MasterServices;
+import org.apache.hadoop.hbase.master.MasterWalManager;
+import org.apache.hadoop.hbase.master.MockNoopMasterServices;
+import org.apache.hadoop.hbase.master.RegionState.State;
+import org.apache.hadoop.hbase.master.ServerManager;
+import org.apache.hadoop.hbase.master.balancer.LoadBalancerFactory;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.master.procedure.RSProcedureDispatcher;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureEvent;
+import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
+import org.apache.hadoop.hbase.procedure2.store.NoopProcedureStore;
+import org.apache.hadoop.hbase.procedure2.store.ProcedureStore;
+import org.apache.hadoop.hbase.security.Superusers;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionActionResult;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ResultOrException;
+import org.apache.hadoop.hbase.util.FSUtils;
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+
+/**
+ * A mocked master services.
+ * Tries to fake it. May not always work.
+ */
+public class MockMasterServices extends MockNoopMasterServices {
+  private final MasterFileSystem fileSystemManager;
+  private final MasterWalManager walManager;
+  private final AssignmentManager assignmentManager;
+
+  private MasterProcedureEnv procedureEnv;
+  private ProcedureExecutor procedureExecutor;
+  private ProcedureStore procedureStore;
+  private final ClusterConnection connection;
+  private final LoadBalancer balancer;
+  private final ServerManager 

[10/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 1f3fede..65b5922 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -84,6 +84,7 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.VersionInfoUtil;
 import org.apache.hadoop.hbase.conf.ConfigurationObserver;
 import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
+import org.apache.hadoop.hbase.exceptions.MergeRegionException;
 import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
 import org.apache.hadoop.hbase.exceptions.ScannerResetException;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
@@ -131,12 +132,12 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ClearCompactionQueuesResponse;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionForSplitOrMergeRequest;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionForSplitOrMergeResponse;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionResponse;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresRequest;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.ExecuteProceduresResponse;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetOnlineRegionRequest;
@@ -149,6 +150,8 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerIn
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetServerInfoResponse;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetStoreFileResponse;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsRequest;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.MergeRegionsResponse;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.OpenRegionResponse;
@@ -223,6 +226,8 @@ import org.apache.hadoop.hbase.wal.WALSplitter;
 import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
 import org.apache.zookeeper.KeeperException;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * Implements the regionserver RPC services.
  */
@@ -1482,36 +1487,6 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
 }
   }
 
-  @Override
-  @QosPriority(priority=HConstants.ADMIN_QOS)
-  public CloseRegionForSplitOrMergeResponse closeRegionForSplitOrMerge(
-  final RpcController controller,
-  final CloseRegionForSplitOrMergeRequest request) throws ServiceException 
{
-try {
-  checkOpen();
-
-  List encodedRegionNameList = new ArrayList<>();
-  for(int i = 0; i < request.getRegionCount(); i++) {
-final String encodedRegionName = 
ProtobufUtil.getRegionEncodedName(request.getRegion(i));
-
-// Can be null if we're calling close on a region that's not online
-final Region targetRegion = 
regionServer.getFromOnlineRegions(encodedRegionName);
-if ((targetRegion != null) && (targetRegion.getCoprocessorHost() != 
null)) {
-  targetRegion.getCoprocessorHost().preClose(false);
-  encodedRegionNameList.add(encodedRegionName);
-}
-  }
-  requestCount.increment();
-  LOG.info("Close and offline " + encodedRegionNameList + " regions.");
-  boolean closed = 
regionServer.closeAndOfflineRegionForSplitOrMerge(encodedRegionNameList);
-  CloseRegionForSplitOrMergeResponse.Builder builder =
-  

[04/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
index 81846df..2630ce0 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSplitTransactionOnCluster.java
@@ -39,7 +39,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.CoordinatedStateManager;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -66,23 +66,27 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TestReplicasClient.SlowMeCopro;
-import org.apache.hadoop.hbase.coprocessor.MasterObserver;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.MasterObserver;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.master.AssignmentManager;
+import org.apache.hadoop.hbase.exceptions.UnexpectedStateException;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.MasterRpcServices;
 import org.apache.hadoop.hbase.master.RegionState;
 import org.apache.hadoop.hbase.master.RegionState.State;
-import org.apache.hadoop.hbase.master.RegionStates;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse;
+import org.apache.hadoop.hbase.master.assignment.AssignmentManager;
+import org.apache.hadoop.hbase.master.assignment.RegionStates;
+import org.apache.hadoop.hbase.master.NoSuchProcedureException;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
 import 
org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionRequest;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.ReportRegionStateTransitionResponse;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -98,11 +102,11 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
+import org.junit.rules.TestRule;
 
 /**
  * The below tests are testing split region against a running cluster
@@ -110,8 +114,9 @@ import org.junit.rules.TestName;
 @Category({RegionServerTests.class, LargeTests.class})
 @SuppressWarnings("deprecation")
 public class TestSplitTransactionOnCluster {
-  private static final Log LOG =
-LogFactory.getLog(TestSplitTransactionOnCluster.class);
+  private static final Log LOG = 
LogFactory.getLog(TestSplitTransactionOnCluster.class);
+  @Rule public final TestRule timeout = 
CategoryBasedTimeout.builder().withTimeout(this.getClass()).
+  withLookingForStuckThread(true).build();
   private Admin admin = null;
   private MiniHBaseCluster cluster = null;
   private static final int NB_SERVERS = 3;
@@ -150,8 +155,11 @@ public class TestSplitTransactionOnCluster {
   throws IOException, InterruptedException {
 assertEquals(1, regions.size());
 HRegionInfo hri = regions.get(0).getRegionInfo();
-cluster.getMaster().getAssignmentManager()
-  

[11/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
new file mode 100644
index 000..e7157d0
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/CompactSplit.java
@@ -0,0 +1,723 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.Executors;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.RejectedExecutionHandler;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.conf.ConfigurationManager;
+import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
+import org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
+import 
org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;
+import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.util.StealJobQueue;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.util.StringUtils;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+
+/**
+ * Compact region on request and then run split if appropriate
+ */
+@InterfaceAudience.Private
+public class CompactSplit implements CompactionRequestor, 
PropagatingConfigurationObserver {
+  private static final Log LOG = LogFactory.getLog(CompactSplit.class);
+
+  // Configuration key for the large compaction threads.
+  public final static String LARGE_COMPACTION_THREADS =
+  "hbase.regionserver.thread.compaction.large";
+  public final static int LARGE_COMPACTION_THREADS_DEFAULT = 1;
+
+  // Configuration key for the small compaction threads.
+  public final static String SMALL_COMPACTION_THREADS =
+  "hbase.regionserver.thread.compaction.small";
+  public final static int SMALL_COMPACTION_THREADS_DEFAULT = 1;
+
+  // Configuration key for split threads
+  public final static String SPLIT_THREADS = "hbase.regionserver.thread.split";
+  public final static int SPLIT_THREADS_DEFAULT = 1;
+
+  // Configuration keys for merge threads
+  public final static String MERGE_THREADS = "hbase.regionserver.thread.merge";
+  public final static int MERGE_THREADS_DEFAULT = 1;
+
+  public static final String REGION_SERVER_REGION_SPLIT_LIMIT =
+  "hbase.regionserver.regionSplitLimit";
+  public static final int DEFAULT_REGION_SERVER_REGION_SPLIT_LIMIT= 1000;
+
+  private final HRegionServer server;
+  private final Configuration conf;
+
+  private final ThreadPoolExecutor longCompactions;
+  private final ThreadPoolExecutor shortCompactions;
+  private final ThreadPoolExecutor splits;
+  private final ThreadPoolExecutor mergePool;
+
+  private volatile ThroughputController compactionThroughputController;
+
+  /**
+   * Splitting should not take place if the total number of regions exceed 
this.
+   * This is not a hard limit to the number of regions but it is a guideline to
+   * stop splitting after number of online regions is greater than this.
+   */
+  private int regionSplitLimit;
+
+  /** 

[03/29] hbase git commit: HBASE-18042 Client Compatibility breaks between versions 1.2 and 1.3

2017-05-27 Thread stack
HBASE-18042 Client Compatibility breaks between versions 1.2 and 1.3


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6846b039
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6846b039
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6846b039

Branch: refs/heads/HBASE-14614
Commit: 6846b03944d7e72301b825d4d118732c0ca65577
Parents: efc7edc
Author: zhangduo 
Authored: Thu May 25 11:02:09 2017 +0800
Committer: zhangduo 
Committed: Sat May 27 17:55:49 2017 +0800

--
 .../hbase/regionserver/RSRpcServices.java   |  58 +---
 .../hbase/client/TestAlwaysSetScannerId.java|   5 +-
 .../hadoop/hbase/client/TestLeaseRenewal.java   |   3 +-
 .../client/TestScanWithoutFetchingData.java | 131 +++
 4 files changed, 175 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6846b039/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index b3ca94d..1f3fede 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -27,10 +29,20 @@ import java.net.BindException;
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.nio.ByteBuffer;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import java.util.Map.Entry;
+import java.util.NavigableMap;
+import java.util.Set;
+import java.util.TreeSet;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.LongAdder;
@@ -194,7 +206,6 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMet
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
@@ -265,7 +276,10 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
 
   private final AtomicLong scannerIdGen = new AtomicLong(0L);
   private final ConcurrentMap scanners = new 
ConcurrentHashMap<>();
-
+  // Hold the name of a closed scanner for a while. This is used to keep 
compatible for old clients
+  // which may send next or close request to a region scanner which has 
already been exhausted. The
+  // entries will be removed automatically after scannerLeaseTimeoutPeriod.
+  private final Cache closedScanners;
   /**
* The lease timeout period for client scanners (milliseconds).
*/
@@ -1168,6 +1182,9 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
 isa = new InetSocketAddress(initialIsa.getHostName(), address.getPort());
 rpcServer.setErrorHandler(this);
 rs.setName(name);
+
+closedScanners = CacheBuilder.newBuilder()
+.expireAfterAccess(scannerLeaseTimeoutPeriod, 
TimeUnit.MILLISECONDS).build();
   }
 
   @Override
@@ -2790,18 +2807,18 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
 String scannerName = Long.toString(request.getScannerId());
 RegionScannerHolder rsh = scanners.get(scannerName);
 if (rsh == null) {
-  // just ignore the close request if scanner does not exists.
-  if (request.hasCloseScanner() && request.getCloseScanner()) {
+  // just ignore the next or close request if scanner does not exists.
+  if 

[01/29] hbase git commit: HBASE-18114 Update the config of TestAsync*AdminApi to make test stable [Forced Update!]

2017-05-27 Thread stack
Repository: hbase
Updated Branches:
  refs/heads/HBASE-14614 444fbed0e -> 657a5d46b (forced update)


HBASE-18114 Update the config of TestAsync*AdminApi to make test stable


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/97484f2a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/97484f2a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/97484f2a

Branch: refs/heads/HBASE-14614
Commit: 97484f2aaf3809137fd50180164dc2c741d05ee8
Parents: 564c193
Author: Guanghao Zhang 
Authored: Thu May 25 19:16:44 2017 +0800
Committer: Guanghao Zhang 
Committed: Sat May 27 11:11:40 2017 +0800

--
 .../apache/hadoop/hbase/client/TestAsyncAdminBase.java |  7 +++
 .../hbase/client/TestAsyncNamespaceAdminApi.java   |  8 ++--
 .../hbase/client/TestAsyncProcedureAdminApi.java   | 13 +++--
 .../hadoop/hbase/client/TestAsyncRegionAdminApi.java   |  4 ++--
 .../hbase/client/TestAsyncReplicationAdminApi.java | 10 +++---
 .../hadoop/hbase/client/TestAsyncSnapshotAdminApi.java |  4 ++--
 .../hadoop/hbase/client/TestAsyncTableAdminApi.java|  3 ---
 7 files changed, 27 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/97484f2a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
index 1881d4c..cdb5433 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
@@ -45,10 +45,9 @@ public abstract class TestAsyncAdminBase {
 
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
-TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_PAUSE, 10);
-
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 3);
-TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 
1000);
-
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 
3000);
+TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 
6);
+
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 
12);
+
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2);
 TEST_UTIL.getConfiguration().setInt(START_LOG_ERRORS_AFTER_COUNT_KEY, 0);
 TEST_UTIL.startMiniCluster(2);
 ASYNC_CONN = 
ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();

http://git-wip-us.apache.org/repos/asf/hbase/blob/97484f2a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java
index 5cab420..ed2e246 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java
@@ -27,6 +27,7 @@ import static org.junit.Assert.fail;
 import java.util.concurrent.Callable;
 
 import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -37,7 +38,7 @@ import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.ZKNamespaceManager;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
-import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -45,7 +46,7 @@ import org.junit.experimental.categories.Category;
 /**
  * Class to test asynchronous namespace admin operations.
  */
-@Category({ MediumTests.class, ClientTests.class })
+@Category({ LargeTests.class, ClientTests.class })
 public class TestAsyncNamespaceAdminApi extends TestAsyncAdminBase {
 
   private String prefix = "TestNamespace";
@@ -54,6 +55,9 @@ public class TestAsyncNamespaceAdminApi extends 
TestAsyncAdminBase {
 
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
+TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 
6);
+

[02/29] hbase git commit: HBASE-18115 Move SaslServer creation to HBaseSaslRpcServer

2017-05-27 Thread stack
HBASE-18115 Move SaslServer creation to HBaseSaslRpcServer


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/efc7edc8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/efc7edc8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/efc7edc8

Branch: refs/heads/HBASE-14614
Commit: efc7edc81a0d9da486ca37b8314baf5a7e75bc86
Parents: 97484f2
Author: zhangduo 
Authored: Sat May 27 11:38:41 2017 +0800
Committer: zhangduo 
Committed: Sat May 27 11:38:41 2017 +0800

--
 .../apache/hadoop/hbase/security/SaslUtil.java  |  11 +-
 .../org/apache/hadoop/hbase/ipc/RpcServer.java  |  11 +-
 .../hadoop/hbase/ipc/ServerRpcConnection.java   |  86 +++-
 .../hbase/security/HBaseSaslRpcServer.java  | 136 +--
 4 files changed, 136 insertions(+), 108 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/efc7edc8/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index aaa9d7a..4b6abe3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -24,6 +24,7 @@ import java.util.TreeMap;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslClient;
 import javax.security.sasl.SaslException;
+import javax.security.sasl.SaslServer;
 
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
@@ -97,7 +98,7 @@ public class SaslUtil {
* @param rpcProtection Value of 'hbase.rpc.protection' configuration.
* @return Map with values for SASL properties.
*/
-  static Map initSaslProperties(String rpcProtection) {
+  public static Map initSaslProperties(String rpcProtection) {
 String saslQop;
 if (rpcProtection.isEmpty()) {
   saslQop = QualityOfProtection.AUTHENTICATION.getSaslQop();
@@ -123,4 +124,12 @@ public class SaslUtil {
   LOG.error("Error disposing of SASL client", e);
 }
   }
+
+  static void safeDispose(SaslServer saslServer) {
+try {
+  saslServer.dispose();
+} catch (SaslException e) {
+  LOG.error("Error disposing of SASL server", e);
+}
+  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/efc7edc8/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
index f899867..d553647 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java
@@ -30,8 +30,10 @@ import java.nio.channels.GatheringByteChannel;
 import java.nio.channels.ReadableByteChannel;
 import java.nio.channels.WritableByteChannel;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.atomic.LongAdder;
 
@@ -55,7 +57,8 @@ import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.nio.MultiByteBuff;
 import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.regionserver.RSRpcServices;
-import org.apache.hadoop.hbase.security.HBaseSaslRpcServer;
+import org.apache.hadoop.hbase.security.SaslUtil;
+import org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager;
@@ -112,6 +115,7 @@ public abstract class RpcServer implements 
RpcServerInterface,
   protected static final Log AUDITLOG = LogFactory.getLog("SecurityLogger."
   + Server.class.getName());
   protected SecretManager secretManager;
+  protected final Map saslProps;
   protected ServiceAuthorizationManager authManager;
 
   /** This is set to Call object before Handler invokes an RPC and ybdie
@@ -307,7 +311,10 @@ public abstract class RpcServer implements 
RpcServerInterface,
 this.userProvider = UserProvider.instantiate(conf);
 this.isSecurityEnabled = userProvider.isHBaseSecurityEnabled();
 if (isSecurityEnabled) {
-  HBaseSaslRpcServer.init(conf);
+  saslProps = SaslUtil.initSaslProperties(conf.get("hbase.rpc.protection",
+

[22/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java
--
diff --git 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java
 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java
index b886f5c..299b55e 100644
--- 
a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java
+++ 
b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/RegionServerStatusProtos.java
@@ -8822,1348 +8822,6 @@ public final class RegionServerStatusProtos {
 
   }
 
-  public interface SplitTableRegionRequestOrBuilder extends
-  // 
@@protoc_insertion_point(interface_extends:hbase.pb.SplitTableRegionRequest)
-  org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder {
-
-/**
- * required .hbase.pb.RegionInfo region_info = 1;
- */
-boolean hasRegionInfo();
-/**
- * required .hbase.pb.RegionInfo region_info = 1;
- */
-org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo 
getRegionInfo();
-/**
- * required .hbase.pb.RegionInfo region_info = 1;
- */
-
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder
 getRegionInfoOrBuilder();
-
-/**
- * required bytes split_row = 2;
- */
-boolean hasSplitRow();
-/**
- * required bytes split_row = 2;
- */
-org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString 
getSplitRow();
-
-/**
- * optional uint64 nonce_group = 3 [default = 0];
- */
-boolean hasNonceGroup();
-/**
- * optional uint64 nonce_group = 3 [default = 0];
- */
-long getNonceGroup();
-
-/**
- * optional uint64 nonce = 4 [default = 0];
- */
-boolean hasNonce();
-/**
- * optional uint64 nonce = 4 [default = 0];
- */
-long getNonce();
-  }
-  /**
-   * 
-   **
-   * Splits the specified region.
-   * 
-   *
-   * Protobuf type {@code hbase.pb.SplitTableRegionRequest}
-   */
-  public  static final class SplitTableRegionRequest extends
-  org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 
implements
-  // 
@@protoc_insertion_point(message_implements:hbase.pb.SplitTableRegionRequest)
-  SplitTableRegionRequestOrBuilder {
-// Use SplitTableRegionRequest.newBuilder() to construct.
-private 
SplitTableRegionRequest(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder
 builder) {
-  super(builder);
-}
-private SplitTableRegionRequest() {
-  splitRow_ = 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
-  nonceGroup_ = 0L;
-  nonce_ = 0L;
-}
-
-@java.lang.Override
-public final 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
-getUnknownFields() {
-  return this.unknownFields;
-}
-private SplitTableRegionRequest(
-org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream 
input,
-
org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite 
extensionRegistry)
-throws 
org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException
 {
-  this();
-  int mutable_bitField0_ = 0;
-  
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder 
unknownFields =
-  
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder();
-  try {
-boolean done = false;
-while (!done) {
-  int tag = input.readTag();
-  switch (tag) {
-case 0:
-  done = true;
-  break;
-default: {
-  if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
-done = true;
-  }
-  break;
-}
-case 10: {
-  
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder
 subBuilder = null;
-  if (((bitField0_ & 0x0001) == 0x0001)) {
-subBuilder = regionInfo_.toBuilder();
-  }
-  regionInfo_ = 
input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.PARSER,
 extensionRegistry);
-  if (subBuilder != null) {
-subBuilder.mergeFrom(regionInfo_);
-regionInfo_ = subBuilder.buildPartial();
-  }
-  bitField0_ |= 0x0001;
-  break;
-}
-case 18: {
-  bitField0_ |= 0x0002;
-  splitRow_ = input.readBytes();
-  break;
-}
- 

[13/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MergeTableRegionsProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MergeTableRegionsProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MergeTableRegionsProcedure.java
deleted file mode 100644
index 3600fe0..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/MergeTableRegionsProcedure.java
+++ /dev/null
@@ -1,906 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.master.procedure;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InterruptedIOException;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.MetaMutationAnnotation;
-import org.apache.hadoop.hbase.RegionLoad;
-import org.apache.hadoop.hbase.ServerLoad;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.UnknownRegionException;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.Mutation;
-import org.apache.hadoop.hbase.exceptions.MergeRegionException;
-import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.master.AssignmentManager;
-import org.apache.hadoop.hbase.master.CatalogJanitor;
-import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
-import org.apache.hadoop.hbase.master.MasterFileSystem;
-import org.apache.hadoop.hbase.master.RegionPlan;
-import org.apache.hadoop.hbase.master.RegionState;
-import org.apache.hadoop.hbase.master.RegionStates;
-import org.apache.hadoop.hbase.master.ServerManager;
-import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
-import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MergeTableRegionsState;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.apache.hadoop.hbase.util.FSUtils;
-
-/**
- * The procedure to Merge a region in a table.
- */
-@InterfaceAudience.Private
-public class MergeTableRegionsProcedure
-extends AbstractStateMachineTableProcedure {
-  private static final Log LOG = 
LogFactory.getLog(MergeTableRegionsProcedure.class);
-
-  private Boolean traceEnabled;
-  private AssignmentManager assignmentManager;
-  private int timeout;
-  private ServerName regionLocation;
-  private String regionsToMergeListFullName;
-  private String regionsToMergeListEncodedName;
-
-  private HRegionInfo [] regionsToMerge;
-  private HRegionInfo mergedRegionInfo;
-  private boolean forcible;
-
-  public MergeTableRegionsProcedure() {
-this.traceEnabled = isTraceEnabled();
-this.assignmentManager = null;
-this.timeout = -1;
-this.regionLocation = null;
-this.regionsToMergeListFullName = null;
-this.regionsToMergeListEncodedName = null;
-  }
-
-  public MergeTableRegionsProcedure(
-  final MasterProcedureEnv env,
-  final HRegionInfo[] regionsToMerge,
-  final boolean forcible) throws IOException {
-

[16/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
new file mode 100644
index 000..2b1de9d
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/MergeTableRegionsProcedure.java
@@ -0,0 +1,776 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.master.assignment;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MetaMutationAnnotation;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.UnknownRegionException;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.MasterSwitchType;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.RegionReplicaUtil;
+import org.apache.hadoop.hbase.exceptions.MergeRegionException;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.master.CatalogJanitor;
+import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.master.MasterFileSystem;
+import org.apache.hadoop.hbase.master.RegionState;
+import 
org.apache.hadoop.hbase.master.procedure.AbstractStateMachineTableProcedure;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureUtil;
+import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
+import org.apache.hadoop.hbase.procedure2.ProcedureYieldException;
+import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.MergeTableRegionsState;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.FSUtils;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.lmax.disruptor.YieldingWaitStrategy;
+
+/**
+ * The procedure to Merge a region in a table.
+ * This procedure takes an exclusive table lock since it is working over 
multiple regions.
+ * It holds the lock for the life of the procedure.
+ */
+@InterfaceAudience.Private
+public class MergeTableRegionsProcedure
+extends AbstractStateMachineTableProcedure {
+  private static final Log LOG = 
LogFactory.getLog(MergeTableRegionsProcedure.class);
+  private Boolean traceEnabled;
+  private volatile boolean lock = false;
+  private ServerName regionLocation;
+  private HRegionInfo[] regionsToMerge;
+  private HRegionInfo mergedRegion;
+  private boolean forcible;
+
+  public MergeTableRegionsProcedure() {
+// Required by the Procedure framework to create the procedure on replay
+  }
+
+  public MergeTableRegionsProcedure(final MasterProcedureEnv env,
+  final HRegionInfo regionToMergeA, final HRegionInfo regionToMergeB) 
throws IOException {
+this(env, regionToMergeA, regionToMergeB, false);
+  }
+
+  public 

[15/29] hbase git commit: HBASE-14614 Procedure v2 - Core Assignment Manager (Matteo Bertozzi) Move to a new AssignmentManager, one that describes Assignment using a State Machine built on top of Proc

2017-05-27 Thread stack
http://git-wip-us.apache.org/repos/asf/hbase/blob/657a5d46/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
new file mode 100644
index 000..49124ea
--- /dev/null
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/assignment/RegionTransitionProcedure.java
@@ -0,0 +1,381 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.master.assignment;
+
+import java.io.IOException;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.exceptions.UnexpectedStateException;
+import org.apache.hadoop.hbase.master.assignment.RegionStates.RegionStateNode;
+import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.master.procedure.TableProcedureInterface;
+import org.apache.hadoop.hbase.procedure2.Procedure;
+import org.apache.hadoop.hbase.procedure2.ProcedureSuspendedException;
+import 
org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteOperation;
+import 
org.apache.hadoop.hbase.procedure2.RemoteProcedureDispatcher.RemoteProcedure;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProcedureProtos.RegionTransitionState;
+import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionServerStatusProtos.RegionStateTransition.TransitionCode;
+
+/**
+ * Base class for the Assign and Unassign Procedure.
+ * There can only be one RegionTransitionProcedure per region running at a time
+ * since each procedure takes a lock on the region (see 
MasterProcedureScheduler).
+ *
+ * This procedure is asynchronous and responds to external events.
+ * The AssignmentManager will notify this procedure when the RS completes
+ * the operation and reports the transitioned state
+ * (see the Assign and Unassign class for more detail).
+ * Procedures move from the REGION_TRANSITION_QUEUE state when they are
+ * first submitted, to the REGION_TRANSITION_DISPATCH state when the request
+ * to remote server is sent and the Procedure is suspended waiting on external
+ * event to be woken again. Once the external event is triggered, Procedure
+ * moves to the REGION_TRANSITION_FINISH state.
+ */
+@InterfaceAudience.Private
+public abstract class RegionTransitionProcedure
+extends Procedure
+implements TableProcedureInterface,
+  RemoteProcedure {
+  private static final Log LOG = 
LogFactory.getLog(RegionTransitionProcedure.class);
+
+  protected final AtomicBoolean aborted = new AtomicBoolean(false);
+
+  private RegionTransitionState transitionState =
+  RegionTransitionState.REGION_TRANSITION_QUEUE;
+  private HRegionInfo regionInfo;
+  private volatile boolean lock = false;
+
+  public RegionTransitionProcedure() {
+// Required by the Procedure framework to create the procedure on replay
+super();
+  }
+
+  public RegionTransitionProcedure(final HRegionInfo regionInfo) {
+this.regionInfo = regionInfo;
+  }
+
+  public HRegionInfo getRegionInfo() {
+return regionInfo;
+  }
+
+  protected void setRegionInfo(final HRegionInfo regionInfo) {
+// Setter is for deserialization.
+this.regionInfo = regionInfo;
+  }
+
+  @Override
+  public TableName getTableName() {
+HRegionInfo hri = getRegionInfo();
+return hri != null? hri.getTable(): null;
+  }
+
+  public boolean isMeta() {
+return TableName.isMetaTableName(getTableName());
+  }
+
+  @Override
+  public void toStringClassDetails(final StringBuilder sb) {
+sb.append(getClass().getSimpleName());
+sb.append(" table=");
+sb.append(getTableName());
+sb.append(", region=");
+   

[40/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/f8f0a032
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/f8f0a032
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/f8f0a032

Branch: refs/heads/asf-site
Commit: f8f0a03262a9457c313bc82e7650510997fee61c
Parents: 8097dd9
Author: jenkins 
Authored: Sat May 27 14:59:04 2017 +
Committer: jenkins 
Committed: Sat May 27 14:59:04 2017 +

--
 acid-semantics.html | 4 +-
 apache_hbase_reference_guide.pdf| 4 +-
 apache_hbase_reference_guide.pdfmarks   | 4 +-
 apidocs/constant-values.html| 2 +-
 apidocs/index-all.html  | 8 +-
 apidocs/org/apache/hadoop/hbase/HConstants.html |   184 +-
 .../hbase/quotas/ThrottlingException.Type.html  | 4 +-
 .../org/apache/hadoop/hbase/HConstants.html |   538 +-
 bulk-loads.html | 4 +-
 checkstyle-aggregate.html   | 21660 -
 checkstyle.rss  | 8 +-
 coc.html| 4 +-
 cygwin.html | 4 +-
 dependencies.html   | 4 +-
 dependency-convergence.html | 4 +-
 dependency-info.html| 4 +-
 dependency-management.html  | 4 +-
 devapidocs/allclasses-frame.html| 1 -
 devapidocs/allclasses-noframe.html  | 1 -
 devapidocs/constant-values.html | 8 +-
 devapidocs/index-all.html   |50 +-
 .../org/apache/hadoop/hbase/HConstants.html |   186 +-
 .../hadoop/hbase/backup/package-tree.html   | 2 +-
 .../class-use/InterfaceAudience.Private.html|16 +-
 .../hbase/classification/package-tree.html  | 8 +-
 .../hadoop/hbase/client/package-tree.html   |24 +-
 .../hadoop/hbase/filter/package-tree.html   | 4 +-
 .../hadoop/hbase/io/hfile/package-tree.html | 6 +-
 .../apache/hadoop/hbase/ipc/NettyRpcServer.html | 2 +-
 .../hbase/ipc/NettyServerRpcConnection.html | 2 +-
 .../RpcServer.BlockingServiceAndInterface.html  |12 +-
 .../hadoop/hbase/ipc/RpcServer.CallCleanup.html | 4 +-
 .../org/apache/hadoop/hbase/ipc/RpcServer.html  |   203 +-
 .../ServerRpcConnection.ByteBuffByteInput.html  |20 +-
 .../hadoop/hbase/ipc/ServerRpcConnection.html   |   186 +-
 .../hadoop/hbase/ipc/SimpleRpcServer.html   | 2 +-
 .../hbase/ipc/SimpleServerRpcConnection.html| 2 +-
 .../ipc/class-use/FatalConnectionException.html | 4 +-
 .../apache/hadoop/hbase/ipc/package-tree.html   | 2 +-
 .../hadoop/hbase/mapreduce/package-tree.html| 2 +-
 .../hadoop/hbase/master/package-tree.html   | 4 +-
 .../org/apache/hadoop/hbase/package-tree.html   |10 +-
 .../hadoop/hbase/procedure2/package-tree.html   | 6 +-
 .../hadoop/hbase/quotas/package-tree.html   | 8 +-
 ...SRpcServices.RegionScannerCloseCallBack.html | 8 +-
 .../RSRpcServices.RegionScannerHolder.html  |22 +-
 ...pcServices.RegionScannerShippedCallBack.html |12 +-
 ...RpcServices.RegionScannersCloseCallBack.html |10 +-
 .../RSRpcServices.ScannerListener.html  | 8 +-
 .../hbase/regionserver/RSRpcServices.html   |   261 +-
 .../hadoop/hbase/regionserver/package-tree.html |18 +-
 .../regionserver/querymatcher/package-tree.html | 4 +-
 .../hbase/regionserver/wal/package-tree.html| 2 +-
 ...SaslRpcServer.SaslDigestCallbackHandler.html |31 +-
 ...aseSaslRpcServer.SaslGssCallbackHandler.html |12 +-
 .../hbase/security/HBaseSaslRpcServer.html  |   185 +-
 .../security/SaslUtil.QualityOfProtection.html  |18 +-
 .../apache/hadoop/hbase/security/SaslUtil.html  |41 +-
 .../hbase/security/class-use/AuthMethod.html| 7 +-
 .../security/class-use/HBaseSaslRpcServer.html  |44 +-
 .../hadoop/hbase/security/package-frame.html| 1 -
 .../hadoop/hbase/security/package-summary.html  | 8 +-
 .../hadoop/hbase/security/package-tree.html | 2 +-
 .../hadoop/hbase/security/package-use.html  | 9 +-
 .../hadoop/hbase/thrift/package-tree.html   | 2 +-
 .../tmpl/master/MasterStatusTmpl.ImplData.html  |   240 +-
 .../hbase/tmpl/master/MasterStatusTmpl.html |96 +-
 .../hbase/tmpl/master/MasterStatusTmplImpl.html |48 +-
 .../regionserver/RSStatusTmpl.ImplData.html |60 +-
 .../hbase/tmpl/regionserver/RSStatusTmpl.html   |24 +-
 .../tmpl/regionserver/RSStatusTmplImpl.html |12 +-
 .../apache/hadoop/hbase/util/package-tree.html  |10 +-
 

[38/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
index ac13492..ce4327f 100644
--- a/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/src-html/org/apache/hadoop/hbase/HConstants.html
@@ -1101,284 +1101,280 @@
 1093
"hbase.regionserver.wal.enablecompression";
 1094
 1095  /** Configuration name of WAL storage 
policy
-1096   * Valid values are:
-1097   *  NONE: no preference in destination 
of block replicas
-1098   *  ONE_SSD: place only one block 
replica in SSD and the remaining in default storage
-1099   *  and ALL_SSD: place all block 
replicas on SSD
-1100   *
-1101   * See 
http://hadoop.apache.org/docs/r2.6.0/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html*/
-1102  public static final String 
WAL_STORAGE_POLICY = "hbase.wal.storage.policy";
-1103  public static final String 
DEFAULT_WAL_STORAGE_POLICY = "NONE";
+1096   * Valid values are: HOT, COLD, WARM, 
ALL_SSD, ONE_SSD, LAZY_PERSIST
+1097   * See 
http://hadoop.apache.org/docs/r2.7.3/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html*/
+1098  public static final String 
WAL_STORAGE_POLICY = "hbase.wal.storage.policy";
+1099  public static final String 
DEFAULT_WAL_STORAGE_POLICY = "HOT";
+1100
+1101  /** Region in Transition metrics 
threshold time */
+1102  public static final String 
METRICS_RIT_STUCK_WARNING_THRESHOLD =
+1103  
"hbase.metrics.rit.stuck.warning.threshold";
 1104
-1105  /** Region in Transition metrics 
threshold time */
-1106  public static final String 
METRICS_RIT_STUCK_WARNING_THRESHOLD =
-1107  
"hbase.metrics.rit.stuck.warning.threshold";
-1108
-1109  public static final String 
LOAD_BALANCER_SLOP_KEY = "hbase.regions.slop";
-1110
-  /** delimiter used between portions of 
a region name */
-1112  public static final int DELIMITER = 
',';
-1113
-1114  /**
-1115   * QOS attributes: these attributes 
are used to demarcate RPC call processing
-1116   * by different set of handlers. For 
example, HIGH_QOS tagged methods are
-1117   * handled by high priority 
handlers.
-1118   */
-1119  // normal_QOS  replication_QOS 
 replay_QOS  QOS_threshold  admin_QOS  high_QOS
-1120  public static final int NORMAL_QOS = 
0;
-1121  public static final int 
REPLICATION_QOS = 5;
-1122  public static final int REPLAY_QOS = 
6;
-1123  public static final int QOS_THRESHOLD 
= 10;
-1124  public static final int ADMIN_QOS = 
100;
-1125  public static final int HIGH_QOS = 
200;
-1126  public static final int 
SYSTEMTABLE_QOS = HIGH_QOS;
-1127
-1128  /** Directory under /hbase where 
archived hfiles are stored */
-1129  public static final String 
HFILE_ARCHIVE_DIRECTORY = "archive";
-1130
-1131  /**
-1132   * Name of the directory to store all 
snapshots. See SnapshotDescriptionUtils for
-1133   * remaining snapshot constants; this 
is here to keep HConstants dependencies at a minimum and
-1134   * uni-directional.
-1135   */
-1136  public static final String 
SNAPSHOT_DIR_NAME = ".hbase-snapshot";
-1137
-1138  /* Name of old snapshot directory. See 
HBASE-8352 for details on why it needs to be renamed */
-1139  public static final String 
OLD_SNAPSHOT_DIR_NAME = ".snapshot";
-1140
-1141  /** Temporary directory used for table 
creation and deletion */
-1142  public static final String 
HBASE_TEMP_DIRECTORY = ".tmp";
-1143  /**
-1144   * The period (in milliseconds) 
between computing region server point in time metrics
-1145   */
-1146  public static final String 
REGIONSERVER_METRICS_PERIOD = "hbase.regionserver.metrics.period";
-1147  public static final long 
DEFAULT_REGIONSERVER_METRICS_PERIOD = 5000;
-1148  /** Directories that are not HBase 
table directories */
-1149  public static final ListString 
HBASE_NON_TABLE_DIRS =
-1150
Collections.unmodifiableList(Arrays.asList(new String[] {
-1151  HBCK_SIDELINEDIR_NAME, 
HBASE_TEMP_DIRECTORY, MIGRATION_NAME
-1152}));
-1153
-1154  /** Directories that are not HBase 
user table directories */
-1155  public static final ListString 
HBASE_NON_USER_TABLE_DIRS =
-1156
Collections.unmodifiableList(Arrays.asList((String[])ArrayUtils.addAll(
-1157  new String[] { 
TableName.META_TABLE_NAME.getNameAsString() },
-1158  
HBASE_NON_TABLE_DIRS.toArray(;
-1159
-1160  /** Health script related settings. 
*/
-1161  public static final String 
HEALTH_SCRIPT_LOC = "hbase.node.health.script.location";
-1162  public static final String 
HEALTH_SCRIPT_TIMEOUT = "hbase.node.health.script.timeout";
-1163  public static final String 
HEALTH_CHORE_WAKE_FREQ =
-1164  
"hbase.node.health.script.frequency";
-1165  public static final long 
DEFAULT_HEALTH_SCRIPT_TIMEOUT = 6;
-1166  /**
-1167   * The maximum number of health check 
failures a server can encounter consecutively.

[28/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html 
b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
index 4e4f766..e54a621 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
@@ -411,148 +411,148 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 privateboolean m_format__IsNotDefault
 
 
-
+
 
 
 
 
-m_frags
-privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer m_frags
+m_serverManager
+privateServerManager m_serverManager
 
 
-
+
 
 
 
 
-m_frags__IsNotDefault
-privateboolean m_frags__IsNotDefault
+m_serverManager__IsNotDefault
+privateboolean m_serverManager__IsNotDefault
 
 
-
+
 
 
 
 
-m_deadServers
-privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">SetServerName m_deadServers
+m_filter
+privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_filter
 
 
-
+
 
 
 
 
-m_deadServers__IsNotDefault
-privateboolean m_deadServers__IsNotDefault
+m_filter__IsNotDefault
+privateboolean m_filter__IsNotDefault
 
 
-
+
 
 
 
 
-m_serverManager
-privateServerManager m_serverManager
+m_assignmentManager
+privateAssignmentManager m_assignmentManager
 
 
-
+
 
 
 
 
-m_serverManager__IsNotDefault
-privateboolean m_serverManager__IsNotDefault
+m_assignmentManager__IsNotDefault
+privateboolean m_assignmentManager__IsNotDefault
 
 
-
+
 
 
 
 
-m_assignmentManager
-privateAssignmentManager m_assignmentManager
+m_frags
+privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integer m_frags
 
 
-
+
 
 
 
 
-m_assignmentManager__IsNotDefault
-privateboolean m_assignmentManager__IsNotDefault
+m_frags__IsNotDefault
+privateboolean m_frags__IsNotDefault
 
 
-
+
 
 
 
 
-m_servers
-privatehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName m_servers
+m_deadServers
+privatehttp://docs.oracle.com/javase/8/docs/api/java/util/Set.html?is-external=true;
 title="class or interface in java.util">SetServerName m_deadServers
 
 
-
+
 
 
 
 
-m_servers__IsNotDefault
-privateboolean m_servers__IsNotDefault
+m_deadServers__IsNotDefault
+privateboolean m_deadServers__IsNotDefault
 
 
-
+
 
 
 
 
-m_filter
-privatehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String m_filter
+m_metaLocation
+privateServerName m_metaLocation
 
 
-
+
 
 
 
 
-m_filter__IsNotDefault
-privateboolean m_filter__IsNotDefault
+m_metaLocation__IsNotDefault
+privateboolean m_metaLocation__IsNotDefault
 
 
-
+
 
 
 
 
-m_metaLocation
-privateServerName m_metaLocation
+m_catalogJanitorEnabled
+privateboolean m_catalogJanitorEnabled
 
 
-
+
 
 
 
 
-m_metaLocation__IsNotDefault
-privateboolean m_metaLocation__IsNotDefault
+m_catalogJanitorEnabled__IsNotDefault
+privateboolean m_catalogJanitorEnabled__IsNotDefault
 
 
-
+
 
 
 
 
-m_catalogJanitorEnabled
-privateboolean m_catalogJanitorEnabled
+m_servers
+privatehttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListServerName m_servers
 
 
-
+
 
 
 
 
-m_catalogJanitorEnabled__IsNotDefault
-privateboolean m_catalogJanitorEnabled__IsNotDefault
+m_servers__IsNotDefault
+privateboolean m_servers__IsNotDefault
 
 
 
@@ -625,220 +625,220 @@ extends org.jamon.AbstractTemplateProxy.ImplData
 publicbooleangetFormat__IsNotDefault()
 
 
-
+
 
 
 
 
-setFrags
-publicvoidsetFrags(http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/Integer.html?is-external=true;
 title="class or interface in java.lang">Integerfrags)
+setServerManager

[03/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApi.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApi.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApi.html
index bd86a22..d60f295 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApi.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApi.html
@@ -25,403 +25,407 @@
 017 */
 018package org.apache.hadoop.hbase.client;
 019
-020import static 
org.junit.Assert.assertEquals;
-021import static 
org.junit.Assert.assertFalse;
-022import static 
org.junit.Assert.assertNotNull;
-023import static 
org.junit.Assert.assertNull;
-024import static 
org.junit.Assert.assertTrue;
-025import static org.junit.Assert.fail;
-026
-027import java.util.ArrayList;
-028import java.util.HashMap;
-029import java.util.HashSet;
-030import java.util.List;
-031import java.util.Map;
-032import java.util.Set;
-033import 
java.util.concurrent.CompletionException;
-034
-035import 
org.apache.hadoop.hbase.HConstants;
-036import 
org.apache.hadoop.hbase.TableName;
-037import 
org.apache.hadoop.hbase.replication.ReplicationException;
-038import 
org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
-039import 
org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
-040import 
org.apache.hadoop.hbase.testclassification.ClientTests;
-041import 
org.apache.hadoop.hbase.testclassification.MediumTests;
-042import org.junit.BeforeClass;
-043import org.junit.Rule;
-044import org.junit.Test;
-045import 
org.junit.experimental.categories.Category;
-046import org.junit.rules.TestName;
-047
-048/**
-049 * Class to test asynchronous replication 
admin operations.
-050 */
-051@Category({MediumTests.class, 
ClientTests.class})
-052public class TestAsyncReplicationAdminApi 
extends TestAsyncAdminBase {
-053
-054  private final String ID_ONE = "1";
-055  private final String KEY_ONE = 
"127.0.0.1:2181:/hbase";
-056  private final String ID_SECOND = "2";
-057  private final String KEY_SECOND = 
"127.0.0.1:2181:/hbase2";
-058
-059  @Rule
-060  public TestName name = new 
TestName();
-061
-062  @BeforeClass
-063  public static void setUpBeforeClass() 
throws Exception {
-064
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
1);
-065TEST_UTIL.startMiniCluster();
-066ASYNC_CONN = 
ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();
-067  }
-068
-069  @Test
-070  public void testAddRemovePeer() throws 
Exception {
-071ReplicationPeerConfig rpc1 = new 
ReplicationPeerConfig();
-072rpc1.setClusterKey(KEY_ONE);
-073ReplicationPeerConfig rpc2 = new 
ReplicationPeerConfig();
-074rpc2.setClusterKey(KEY_SECOND);
-075// Add a valid peer
-076admin.addReplicationPeer(ID_ONE, 
rpc1).join();
-077// try adding the same (fails)
-078try {
-079  admin.addReplicationPeer(ID_ONE, 
rpc1).join();
-080  fail("Test case should fail as 
adding a same peer.");
-081} catch (CompletionException e) {
-082  // OK!
-083}
-084assertEquals(1, 
admin.listReplicationPeers().get().size());
-085// Try to remove an inexisting peer
-086try {
-087  
admin.removeReplicationPeer(ID_SECOND).join();
-088  fail("Test case should fail as 
removing a inexisting peer.");
-089} catch (CompletionException e) {
-090  // OK!
-091}
-092assertEquals(1, 
admin.listReplicationPeers().get().size());
-093// Add a second since multi-slave is 
supported
-094admin.addReplicationPeer(ID_SECOND, 
rpc2).join();
-095assertEquals(2, 
admin.listReplicationPeers().get().size());
-096// Remove the first peer we added
-097
admin.removeReplicationPeer(ID_ONE).join();
-098assertEquals(1, 
admin.listReplicationPeers().get().size());
-099
admin.removeReplicationPeer(ID_SECOND).join();
-100assertEquals(0, 
admin.listReplicationPeers().get().size());
-101  }
-102
-103  @Test
-104  public void testPeerConfig() throws 
Exception {
-105ReplicationPeerConfig config = new 
ReplicationPeerConfig();
-106config.setClusterKey(KEY_ONE);
-107config.getConfiguration().put("key1", 
"value1");
-108config.getConfiguration().put("key2", 
"value2");
-109admin.addReplicationPeer(ID_ONE, 
config).join();
-110
-111
ListReplicationPeerDescription peers = 
admin.listReplicationPeers().get();
-112assertEquals(1, peers.size());
-113ReplicationPeerDescription peerOne = 
peers.get(0);
-114assertNotNull(peerOne);
-115assertEquals("value1", 
peerOne.getPeerConfig().getConfiguration().get("key1"));
-116assertEquals("value2", 
peerOne.getPeerConfig().getConfiguration().get("key2"));
-117
-118
admin.removeReplicationPeer(ID_ONE).join();

[35/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/HConstants.html 
b/devapidocs/org/apache/hadoop/hbase/HConstants.html
index a99b408..8ebebeb 100644
--- a/devapidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/org/apache/hadoop/hbase/HConstants.html
@@ -1889,12 +1889,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 WAL_STORAGE_POLICY
 Configuration name of WAL storage policy
- Valid values are:
-  NONE: no preference in destination of block replicas
-  ONE_SSD: place only one block replica in SSD and the remaining in default 
storage
-  and ALL_SSD: place all block replicas on SSD
-
- See 
http://hadoop.apache.org/docs/r2.6.0/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html
+ Valid values are: HOT, COLD, WARM, ALL_SSD, ONE_SSD, LAZY_PERSIST
+ See 
http://hadoop.apache.org/docs/r2.7.3/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html
 
 
 
@@ -5426,14 +5422,10 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 WAL_STORAGE_POLICY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String WAL_STORAGE_POLICY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String WAL_STORAGE_POLICY
 Configuration name of WAL storage policy
- Valid values are:
-  NONE: no preference in destination of block replicas
-  ONE_SSD: place only one block replica in SSD and the remaining in default 
storage
-  and ALL_SSD: place all block replicas on SSD
-
- See 
http://hadoop.apache.org/docs/r2.6.0/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html
+ Valid values are: HOT, COLD, WARM, ALL_SSD, ONE_SSD, LAZY_PERSIST
+ See 
http://hadoop.apache.org/docs/r2.7.3/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html
 
 See Also:
 Constant
 Field Values
@@ -5446,7 +5438,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 DEFAULT_WAL_STORAGE_POLICY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String DEFAULT_WAL_STORAGE_POLICY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String DEFAULT_WAL_STORAGE_POLICY
 
 See Also:
 Constant
 Field Values
@@ -5459,7 +5451,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 METRICS_RIT_STUCK_WARNING_THRESHOLD
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String METRICS_RIT_STUCK_WARNING_THRESHOLD
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String METRICS_RIT_STUCK_WARNING_THRESHOLD
 Region in Transition metrics threshold time
 
 See Also:
@@ -5473,7 +5465,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 LOAD_BALANCER_SLOP_KEY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String LOAD_BALANCER_SLOP_KEY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String LOAD_BALANCER_SLOP_KEY
 
 See Also:
 Constant
 Field Values
@@ -5486,7 +5478,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 DELIMITER
-public static finalint DELIMITER
+public static finalint DELIMITER
 delimiter used between portions of a region name
 
 See Also:
@@ -5500,7 +5492,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 NORMAL_QOS
-public static finalint NORMAL_QOS
+public static finalint NORMAL_QOS
 QOS attributes: these attributes are used to demarcate RPC 
call processing
  by different set of handlers. For example, HIGH_QOS tagged methods are
  handled by high priority handlers.
@@ -5516,7 +5508,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 REPLICATION_QOS
-public static finalint REPLICATION_QOS
+public static finalint REPLICATION_QOS
 
 See Also:
 Constant
 Field Values
@@ -5529,7 +5521,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 REPLAY_QOS
-public static finalint REPLAY_QOS
+public static finalint REPLAY_QOS
 
 See Also:
 Constant
 Field Values
@@ -5542,7 +5534,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 QOS_THRESHOLD
-public static finalint QOS_THRESHOLD
+public static finalint 

[23/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.CallCleanup.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.CallCleanup.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.CallCleanup.html
index be47cd3..5bf8c01 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.CallCleanup.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.CallCleanup.html
@@ -38,763 +38,770 @@
 030import 
java.nio.channels.ReadableByteChannel;
 031import 
java.nio.channels.WritableByteChannel;
 032import java.util.ArrayList;
-033import java.util.HashMap;
-034import java.util.List;
-035import java.util.Map;
-036import 
java.util.concurrent.atomic.LongAdder;
-037
-038import org.apache.commons.logging.Log;
-039import 
org.apache.commons.logging.LogFactory;
-040import 
org.apache.hadoop.conf.Configuration;
-041import 
org.apache.hadoop.hbase.CallQueueTooBigException;
-042import 
org.apache.hadoop.hbase.CellScanner;
-043import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-044import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-045import 
org.apache.hadoop.hbase.HConstants;
-046import org.apache.hadoop.hbase.Server;
-047import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-048import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-049import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-050import 
org.apache.hadoop.hbase.exceptions.RequestTooBigException;
-051import 
org.apache.hadoop.hbase.io.ByteBufferPool;
-052import 
org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
-053import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-054import 
org.apache.hadoop.hbase.nio.ByteBuff;
-055import 
org.apache.hadoop.hbase.nio.MultiByteBuff;
-056import 
org.apache.hadoop.hbase.nio.SingleByteBuff;
-057import 
org.apache.hadoop.hbase.regionserver.RSRpcServices;
-058import 
org.apache.hadoop.hbase.security.HBaseSaslRpcServer;
-059import 
org.apache.hadoop.hbase.security.User;
-060import 
org.apache.hadoop.hbase.security.UserProvider;
-061import 
org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
-063import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
-064import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-065import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
-066import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
-070import 
org.apache.hadoop.hbase.util.Pair;
-071import 
org.apache.hadoop.security.UserGroupInformation;
-072import 
org.apache.hadoop.security.authorize.AuthorizationException;
-073import 
org.apache.hadoop.security.authorize.PolicyProvider;
-074import 
org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
-075import 
org.apache.hadoop.security.token.SecretManager;
-076import 
org.apache.hadoop.security.token.TokenIdentifier;
-077import 
org.codehaus.jackson.map.ObjectMapper;
-078
-079/**
-080 * An RPC server that hosts protobuf 
described Services.
-081 *
-082 */
-083@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
 HBaseInterfaceAudience.PHOENIX})
-084@InterfaceStability.Evolving
-085public abstract class RpcServer 
implements RpcServerInterface,
-086ConfigurationObserver {
-087  // LOG is being used in CallRunner and 
the log level is being changed in tests
-088  public static final Log LOG = 
LogFactory.getLog(RpcServer.class);
-089  protected static final 
CallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION
-090  = new CallQueueTooBigException();
-091
-092  private final boolean authorize;
-093  protected boolean isSecurityEnabled;
+033import java.util.Collections;
+034import java.util.HashMap;
+035import java.util.List;
+036import java.util.Locale;
+037import java.util.Map;
+038import 
java.util.concurrent.atomic.LongAdder;
+039
+040import org.apache.commons.logging.Log;
+041import 
org.apache.commons.logging.LogFactory;
+042import 
org.apache.hadoop.conf.Configuration;
+043import 
org.apache.hadoop.hbase.CallQueueTooBigException;
+044import 
org.apache.hadoop.hbase.CellScanner;
+045import 
org.apache.hadoop.hbase.DoNotRetryIOException;
+046import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
+047import 
org.apache.hadoop.hbase.HConstants;
+048import org.apache.hadoop.hbase.Server;
+049import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+050import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+051import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
+052import 

[14/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
index 7aeb0fb..f01cf3a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
@@ -27,3384 +27,3404 @@
 019package 
org.apache.hadoop.hbase.regionserver;
 020
 021import 
com.google.common.annotations.VisibleForTesting;
-022
-023import java.io.FileNotFoundException;
-024import java.io.IOException;
-025import java.io.InterruptedIOException;
-026import java.net.BindException;
-027import java.net.InetSocketAddress;
-028import java.net.UnknownHostException;
-029import java.nio.ByteBuffer;
-030import java.util.*;
-031import java.util.Map.Entry;
-032import 
java.util.concurrent.ConcurrentHashMap;
-033import 
java.util.concurrent.ConcurrentMap;
-034import 
java.util.concurrent.atomic.AtomicBoolean;
-035import 
java.util.concurrent.atomic.AtomicLong;
-036import 
java.util.concurrent.atomic.LongAdder;
-037
-038import 
org.apache.commons.lang.mutable.MutableObject;
-039import org.apache.commons.logging.Log;
-040import 
org.apache.commons.logging.LogFactory;
-041import 
org.apache.hadoop.conf.Configuration;
-042import org.apache.hadoop.fs.Path;
-043import 
org.apache.hadoop.hbase.ByteBufferCell;
-044import org.apache.hadoop.hbase.Cell;
-045import 
org.apache.hadoop.hbase.CellScannable;
-046import 
org.apache.hadoop.hbase.CellScanner;
-047import 
org.apache.hadoop.hbase.CellUtil;
-048import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-049import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-050import 
org.apache.hadoop.hbase.HBaseIOException;
-051import 
org.apache.hadoop.hbase.HConstants;
-052import 
org.apache.hadoop.hbase.HRegionInfo;
-053import 
org.apache.hadoop.hbase.HTableDescriptor;
-054import 
org.apache.hadoop.hbase.MultiActionResultTooLarge;
-055import 
org.apache.hadoop.hbase.NotServingRegionException;
-056import 
org.apache.hadoop.hbase.ServerName;
-057import 
org.apache.hadoop.hbase.TableName;
-058import 
org.apache.hadoop.hbase.UnknownScannerException;
-059import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-060import 
org.apache.hadoop.hbase.client.Append;
-061import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-062import 
org.apache.hadoop.hbase.client.Delete;
-063import 
org.apache.hadoop.hbase.client.Durability;
-064import 
org.apache.hadoop.hbase.client.Get;
-065import 
org.apache.hadoop.hbase.client.Increment;
-066import 
org.apache.hadoop.hbase.client.Mutation;
-067import 
org.apache.hadoop.hbase.client.Put;
-068import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-069import 
org.apache.hadoop.hbase.client.Result;
-070import 
org.apache.hadoop.hbase.client.RowMutations;
-071import 
org.apache.hadoop.hbase.client.Scan;
-072import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-073import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-074import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-075import 
org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-076import 
org.apache.hadoop.hbase.exceptions.ScannerResetException;
-077import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-078import 
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-079import 
org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
-080import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-081import 
org.apache.hadoop.hbase.ipc.PriorityFunction;
-082import 
org.apache.hadoop.hbase.ipc.QosPriority;
-083import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-084import 
org.apache.hadoop.hbase.ipc.RpcCallback;
-085import 
org.apache.hadoop.hbase.ipc.RpcServer;
-086import 
org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-087import 
org.apache.hadoop.hbase.ipc.RpcServerFactory;
-088import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-089import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-090import 
org.apache.hadoop.hbase.ipc.ServerRpcController;
-091import 
org.apache.hadoop.hbase.master.MasterRpcServices;
-092import 
org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;
-093import 
org.apache.hadoop.hbase.quotas.OperationQuota;
-094import 
org.apache.hadoop.hbase.quotas.QuotaUtil;
-095import 
org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
-096import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-097import 
org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;
-098import 
org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
-099import 
org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;
-100import 
org.apache.hadoop.hbase.regionserver.Leases.Lease;
-101import 

[24/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
index be47cd3..5bf8c01 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.BlockingServiceAndInterface.html
@@ -38,763 +38,770 @@
 030import 
java.nio.channels.ReadableByteChannel;
 031import 
java.nio.channels.WritableByteChannel;
 032import java.util.ArrayList;
-033import java.util.HashMap;
-034import java.util.List;
-035import java.util.Map;
-036import 
java.util.concurrent.atomic.LongAdder;
-037
-038import org.apache.commons.logging.Log;
-039import 
org.apache.commons.logging.LogFactory;
-040import 
org.apache.hadoop.conf.Configuration;
-041import 
org.apache.hadoop.hbase.CallQueueTooBigException;
-042import 
org.apache.hadoop.hbase.CellScanner;
-043import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-044import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-045import 
org.apache.hadoop.hbase.HConstants;
-046import org.apache.hadoop.hbase.Server;
-047import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-048import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-049import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-050import 
org.apache.hadoop.hbase.exceptions.RequestTooBigException;
-051import 
org.apache.hadoop.hbase.io.ByteBufferPool;
-052import 
org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
-053import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-054import 
org.apache.hadoop.hbase.nio.ByteBuff;
-055import 
org.apache.hadoop.hbase.nio.MultiByteBuff;
-056import 
org.apache.hadoop.hbase.nio.SingleByteBuff;
-057import 
org.apache.hadoop.hbase.regionserver.RSRpcServices;
-058import 
org.apache.hadoop.hbase.security.HBaseSaslRpcServer;
-059import 
org.apache.hadoop.hbase.security.User;
-060import 
org.apache.hadoop.hbase.security.UserProvider;
-061import 
org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
-063import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
-064import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-065import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
-066import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
-070import 
org.apache.hadoop.hbase.util.Pair;
-071import 
org.apache.hadoop.security.UserGroupInformation;
-072import 
org.apache.hadoop.security.authorize.AuthorizationException;
-073import 
org.apache.hadoop.security.authorize.PolicyProvider;
-074import 
org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
-075import 
org.apache.hadoop.security.token.SecretManager;
-076import 
org.apache.hadoop.security.token.TokenIdentifier;
-077import 
org.codehaus.jackson.map.ObjectMapper;
-078
-079/**
-080 * An RPC server that hosts protobuf 
described Services.
-081 *
-082 */
-083@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
 HBaseInterfaceAudience.PHOENIX})
-084@InterfaceStability.Evolving
-085public abstract class RpcServer 
implements RpcServerInterface,
-086ConfigurationObserver {
-087  // LOG is being used in CallRunner and 
the log level is being changed in tests
-088  public static final Log LOG = 
LogFactory.getLog(RpcServer.class);
-089  protected static final 
CallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION
-090  = new CallQueueTooBigException();
-091
-092  private final boolean authorize;
-093  protected boolean isSecurityEnabled;
+033import java.util.Collections;
+034import java.util.HashMap;
+035import java.util.List;
+036import java.util.Locale;
+037import java.util.Map;
+038import 
java.util.concurrent.atomic.LongAdder;
+039
+040import org.apache.commons.logging.Log;
+041import 
org.apache.commons.logging.LogFactory;
+042import 
org.apache.hadoop.conf.Configuration;
+043import 
org.apache.hadoop.hbase.CallQueueTooBigException;
+044import 
org.apache.hadoop.hbase.CellScanner;
+045import 
org.apache.hadoop.hbase.DoNotRetryIOException;
+046import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
+047import 
org.apache.hadoop.hbase.HConstants;
+048import org.apache.hadoop.hbase.Server;
+049import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+050import 
org.apache.hadoop.hbase.classification.InterfaceStability;

[18/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
index 7aeb0fb..f01cf3a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerHolder.html
@@ -27,3384 +27,3404 @@
 019package 
org.apache.hadoop.hbase.regionserver;
 020
 021import 
com.google.common.annotations.VisibleForTesting;
-022
-023import java.io.FileNotFoundException;
-024import java.io.IOException;
-025import java.io.InterruptedIOException;
-026import java.net.BindException;
-027import java.net.InetSocketAddress;
-028import java.net.UnknownHostException;
-029import java.nio.ByteBuffer;
-030import java.util.*;
-031import java.util.Map.Entry;
-032import 
java.util.concurrent.ConcurrentHashMap;
-033import 
java.util.concurrent.ConcurrentMap;
-034import 
java.util.concurrent.atomic.AtomicBoolean;
-035import 
java.util.concurrent.atomic.AtomicLong;
-036import 
java.util.concurrent.atomic.LongAdder;
-037
-038import 
org.apache.commons.lang.mutable.MutableObject;
-039import org.apache.commons.logging.Log;
-040import 
org.apache.commons.logging.LogFactory;
-041import 
org.apache.hadoop.conf.Configuration;
-042import org.apache.hadoop.fs.Path;
-043import 
org.apache.hadoop.hbase.ByteBufferCell;
-044import org.apache.hadoop.hbase.Cell;
-045import 
org.apache.hadoop.hbase.CellScannable;
-046import 
org.apache.hadoop.hbase.CellScanner;
-047import 
org.apache.hadoop.hbase.CellUtil;
-048import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-049import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-050import 
org.apache.hadoop.hbase.HBaseIOException;
-051import 
org.apache.hadoop.hbase.HConstants;
-052import 
org.apache.hadoop.hbase.HRegionInfo;
-053import 
org.apache.hadoop.hbase.HTableDescriptor;
-054import 
org.apache.hadoop.hbase.MultiActionResultTooLarge;
-055import 
org.apache.hadoop.hbase.NotServingRegionException;
-056import 
org.apache.hadoop.hbase.ServerName;
-057import 
org.apache.hadoop.hbase.TableName;
-058import 
org.apache.hadoop.hbase.UnknownScannerException;
-059import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-060import 
org.apache.hadoop.hbase.client.Append;
-061import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-062import 
org.apache.hadoop.hbase.client.Delete;
-063import 
org.apache.hadoop.hbase.client.Durability;
-064import 
org.apache.hadoop.hbase.client.Get;
-065import 
org.apache.hadoop.hbase.client.Increment;
-066import 
org.apache.hadoop.hbase.client.Mutation;
-067import 
org.apache.hadoop.hbase.client.Put;
-068import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-069import 
org.apache.hadoop.hbase.client.Result;
-070import 
org.apache.hadoop.hbase.client.RowMutations;
-071import 
org.apache.hadoop.hbase.client.Scan;
-072import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-073import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-074import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-075import 
org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-076import 
org.apache.hadoop.hbase.exceptions.ScannerResetException;
-077import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-078import 
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-079import 
org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
-080import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-081import 
org.apache.hadoop.hbase.ipc.PriorityFunction;
-082import 
org.apache.hadoop.hbase.ipc.QosPriority;
-083import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-084import 
org.apache.hadoop.hbase.ipc.RpcCallback;
-085import 
org.apache.hadoop.hbase.ipc.RpcServer;
-086import 
org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-087import 
org.apache.hadoop.hbase.ipc.RpcServerFactory;
-088import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-089import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-090import 
org.apache.hadoop.hbase.ipc.ServerRpcController;
-091import 
org.apache.hadoop.hbase.master.MasterRpcServices;
-092import 
org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;
-093import 
org.apache.hadoop.hbase.quotas.OperationQuota;
-094import 
org.apache.hadoop.hbase.quotas.QuotaUtil;
-095import 
org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
-096import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-097import 
org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;
-098import 
org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
-099import 

[32/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html
index 51dce01..5c78545 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/ServerRpcConnection.html
@@ -117,7 +117,8 @@ var activeTableTab = "activeTableTab";
 
 
 
-abstract class ServerRpcConnection
+@InterfaceAudience.Private
+abstract class ServerRpcConnection
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.html?is-external=true;
 title="class or interface in java.io">Closeable
 Reads calls from a connection and queues them for 
handling.
@@ -163,102 +164,98 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 addr
 
 
-private 
org.apache.hadoop.security.UserGroupInformation
-attemptingUser
-
-
 protected boolean
 authenticatedWithFallback
 
-
+
 protected AuthMethod
 authMethod
 
-
+
 protected RpcServer.CallCleanup
 callCleanup
 
-
+
 protected Codec
 codec
 Codec the client asked use.
 
 
-
+
 protected 
org.apache.hadoop.io.compress.CompressionCodec
 compressionCodec
 Compression codec the client asked us use.
 
 
-
+
 protected 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader
 connectionHeader
 
-
+
 protected boolean
 connectionHeaderRead
 
-
+
 protected CryptoAES
 cryptoAES
 
-
+
 protected http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 hostAddress
 
-
+
 protected int
 remotePort
 
-
+
 protected boolean
 retryImmediatelySupported
 
-
+
 protected RpcServer
 rpcServer
 
-
+
 protected boolean
 saslContextEstablished
 
-
-protected http://docs.oracle.com/javase/8/docs/api/javax/security/sasl/SaslServer.html?is-external=true;
 title="class or interface in javax.security.sasl">SaslServer
+
+protected HBaseSaslRpcServer
 saslServer
 
-
+
 protected 
org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService
 service
 
-
+
 protected boolean
 skipInitialSaslHandshake
 
-
+
 protected 
org.apache.hadoop.security.UserGroupInformation
 ugi
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
 unwrappedData
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/nio/ByteBuffer.html?is-external=true;
 title="class or interface in java.nio">ByteBuffer
 unwrappedDataLengthBuffer
 
-
+
 protected boolean
 useCryptoAesWrap
 
-
+
 protected User
 user
 
-
+
 protected boolean
 useSasl
 
-
+
 protected boolean
 useWrap
 
@@ -313,7 +310,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
   RpcServer.CallCleanupreqCleanup)
 
 
-protected 
org.apache.hadoop.security.UserGroupInformation
+private 
org.apache.hadoop.security.UserGroupInformation
 createUser(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderhead)
 
 
@@ -343,7 +340,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 doRespond(RpcResponseresp)
 
 
-protected 
org.apache.hadoop.security.UserGroupInformation
+private 
org.apache.hadoop.security.UserGroupInformation
 getAuthorizedUgi(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in 
java.lang">StringauthorizedId)
 
 
@@ -356,7 +353,7 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exceptione)
 
 
-protected http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+private http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 getFatalConnectionString(intversion,
 byteauthByte)
 
@@ -411,13 +408,13 @@ implements http://docs.oracle.com/javase/8/docs/api/java/io/Closeable.h
 saslReadAndProcess(ByteBuffsaslToken)
 
 
-protected void
+private void
 setupCellBlockCodecs(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderheader)
 Set up cell block codecs
 
 
 
-protected void
+private void
 setupCryptoCipher(org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderheader,
  
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeaderResponse.BuilderchrBuilder)
 Set up cipher for rpc encryption with Apache Commons 
Crypto
@@ -462,7 +459,7 @@ 

hbase-site git commit: INFRA-10751 Empty commit

2017-05-27 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site f8f0a0326 -> 8fe8da7ba


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/8fe8da7b
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/8fe8da7b
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/8fe8da7b

Branch: refs/heads/asf-site
Commit: 8fe8da7bac246f73a7c83687914d36d78f6b2ef4
Parents: f8f0a03
Author: jenkins 
Authored: Sat May 27 14:59:40 2017 +
Committer: jenkins 
Committed: Sat May 27 14:59:40 2017 +

--

--




[12/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.html
index 2aea531..4ff3ed5 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.html
@@ -6,7 +6,7 @@
 
 
 
-001/*
+001/**
 002 * Licensed to the Apache Software 
Foundation (ASF) under one
 003 * or more contributor license 
agreements.  See the NOTICE file
 004 * distributed with this work for 
additional information
@@ -23,169 +23,225 @@
 015 * See the License for the specific 
language governing permissions and
 016 * limitations under the License.
 017 */
-018
-019package 
org.apache.hadoop.hbase.security;
-020
-021import java.io.ByteArrayInputStream;
-022import java.io.DataInputStream;
-023import java.io.IOException;
-024import java.util.Locale;
-025import java.util.Map;
-026import java.util.function.Consumer;
-027
-028import 
javax.security.auth.callback.Callback;
-029import 
javax.security.auth.callback.CallbackHandler;
-030import 
javax.security.auth.callback.NameCallback;
-031import 
javax.security.auth.callback.PasswordCallback;
-032import 
javax.security.auth.callback.UnsupportedCallbackException;
-033import 
javax.security.sasl.AuthorizeCallback;
-034import 
javax.security.sasl.RealmCallback;
-035
-036import org.apache.commons.logging.Log;
-037import 
org.apache.commons.logging.LogFactory;
-038import 
org.apache.hadoop.conf.Configuration;
+018package 
org.apache.hadoop.hbase.security;
+019
+020import java.io.ByteArrayInputStream;
+021import java.io.DataInputStream;
+022import java.io.IOException;
+023import 
java.security.PrivilegedExceptionAction;
+024import java.util.Map;
+025
+026import 
javax.security.auth.callback.Callback;
+027import 
javax.security.auth.callback.CallbackHandler;
+028import 
javax.security.auth.callback.NameCallback;
+029import 
javax.security.auth.callback.PasswordCallback;
+030import 
javax.security.auth.callback.UnsupportedCallbackException;
+031import 
javax.security.sasl.AuthorizeCallback;
+032import 
javax.security.sasl.RealmCallback;
+033import javax.security.sasl.Sasl;
+034import 
javax.security.sasl.SaslException;
+035import javax.security.sasl.SaslServer;
+036
+037import org.apache.commons.logging.Log;
+038import 
org.apache.commons.logging.LogFactory;
 039import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-040import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-041import 
org.apache.hadoop.security.UserGroupInformation;
-042import 
org.apache.hadoop.security.token.SecretManager;
-043import 
org.apache.hadoop.security.token.SecretManager.InvalidToken;
-044import 
org.apache.hadoop.security.token.TokenIdentifier;
-045
-046/**
-047 * A utility class for dealing with SASL 
on RPC server
+040import 
org.apache.hadoop.security.UserGroupInformation;
+041import 
org.apache.hadoop.security.token.SecretManager;
+042import 
org.apache.hadoop.security.token.SecretManager.InvalidToken;
+043import 
org.apache.hadoop.security.token.TokenIdentifier;
+044
+045/**
+046 * A utility class that encapsulates SASL 
logic for RPC server. Copied from
+047 * 
codeorg.apache.hadoop.security/code
 048 */
 049@InterfaceAudience.Private
 050public class HBaseSaslRpcServer {
-051  private static final Log LOG = 
LogFactory.getLog(HBaseSaslRpcServer.class);
-052
-053  private static MapString, 
String saslProps = null;
-054
-055  public static void init(Configuration 
conf) {
-056saslProps = 
SaslUtil.initSaslProperties(conf.get("hbase.rpc.protection",
-057  
QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)));
-058  }
-059
-060  public static MapString, String 
getSaslProps() {
-061return saslProps;
-062  }
-063
-064  public static T extends 
TokenIdentifier T getIdentifier(String id,
-065  SecretManagerT 
secretManager) throws InvalidToken {
-066byte[] tokenId = 
SaslUtil.decodeIdentifier(id);
-067T tokenIdentifier = 
secretManager.createIdentifier();
-068try {
-069  tokenIdentifier.readFields(new 
DataInputStream(new ByteArrayInputStream(
-070  tokenId)));
-071} catch (IOException e) {
-072  throw (InvalidToken) new 
InvalidToken(
-073  "Can't de-serialize 
tokenIdentifier").initCause(e);
-074}
-075return tokenIdentifier;
-076  }
-077
-078
-079  /** CallbackHandler for SASL DIGEST-MD5 
mechanism */
-080  public static class 
SaslDigestCallbackHandler implements CallbackHandler {
-081private 
SecretManagerTokenIdentifier secretManager;
-082private 
ConsumerUserGroupInformation attemptingUserConsumer;
-083
-084public 
SaslDigestCallbackHandler(SecretManagerTokenIdentifier 

[33/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html 
b/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html
index c9d55e3..7f7c8ab 100644
--- a/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html
+++ b/devapidocs/org/apache/hadoop/hbase/ipc/RpcServer.html
@@ -119,7 +119,7 @@ var activeTableTab = "activeTableTab";
 
 @InterfaceAudience.LimitedPrivate(value={"Coprocesssor","Phoenix"})
  @InterfaceStability.Evolving
-public abstract class RpcServer
+public abstract class RpcServer
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements RpcServerInterface, ConfigurationObserver
 An RPC server that hosts protobuf described Services.
@@ -346,53 +346,57 @@ implements 
+protected http://docs.oracle.com/javase/8/docs/api/java/util/Map.html?is-external=true;
 title="class or interface in java.util">Maphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+saslProps
+
+
 protected RpcScheduler
 scheduler
 
-
+
 protected 
org.apache.hadoop.security.token.SecretManagerorg.apache.hadoop.security.token.TokenIdentifier
 secretManager
 
-
+
 protected Server
 server
 
-
+
 protected http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListRpcServer.BlockingServiceAndInterface
 services
 
-
+
 (package private) boolean
 started
 This flag is set to true after all threads are up and 
'running' and the server is then opened
  for business by the call to RpcServerInterface.start().
 
 
-
+
 protected boolean
 tcpKeepAlive
 
-
+
 protected boolean
 tcpNoDelay
 
-
+
 protected UserProvider
 userProvider
 
-
+
 protected static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 WARN_RESPONSE_SIZE
 
-
+
 protected static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 WARN_RESPONSE_TIME
 
-
+
 protected int
 warnResponseSize
 
-
+
 protected int
 warnResponseTime
 
@@ -655,7 +659,7 @@ implements 
 
 LOG
-public static finalorg.apache.commons.logging.Log LOG
+public static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -664,7 +668,7 @@ implements 
 
 CALL_QUEUE_TOO_BIG_EXCEPTION
-protected static finalCallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION
+protected static finalCallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION
 
 
 
@@ -673,7 +677,7 @@ implements 
 
 authorize
-private finalboolean authorize
+private finalboolean authorize
 
 
 
@@ -682,7 +686,7 @@ implements 
 
 isSecurityEnabled
-protectedboolean isSecurityEnabled
+protectedboolean isSecurityEnabled
 
 
 
@@ -691,7 +695,7 @@ implements 
 
 CURRENT_VERSION
-public static finalbyte CURRENT_VERSION
+public static finalbyte CURRENT_VERSION
 
 See Also:
 Constant
 Field Values
@@ -704,7 +708,7 @@ implements 
 
 FALLBACK_TO_INSECURE_CLIENT_AUTH
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String FALLBACK_TO_INSECURE_CLIENT_AUTH
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String FALLBACK_TO_INSECURE_CLIENT_AUTH
 Whether we allow a fallback to SIMPLE auth for insecure 
clients when security is enabled.
 
 See Also:
@@ -718,7 +722,7 @@ implements 
 
 DEFAULT_MAX_CALLQUEUE_LENGTH_PER_HANDLER
-protected static finalint DEFAULT_MAX_CALLQUEUE_LENGTH_PER_HANDLER
+protected static finalint DEFAULT_MAX_CALLQUEUE_LENGTH_PER_HANDLER
 How many calls/handler are allowed in the queue.
 
 See Also:
@@ -732,7 +736,7 @@ implements 
 
 cellBlockBuilder
-protected finalCellBlockBuilder cellBlockBuilder
+protected finalCellBlockBuilder cellBlockBuilder
 
 
 
@@ -741,7 +745,7 @@ implements 
 
 AUTH_FAILED_FOR
-protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String AUTH_FAILED_FOR
+protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String AUTH_FAILED_FOR
 
 See Also:
 Constant
 Field Values
@@ -754,7 +758,7 @@ implements 
 
 AUTH_SUCCESSFUL_FOR
-protected static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String AUTH_SUCCESSFUL_FOR
+protected static 

[05/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/testdevapidocs/org/apache/hadoop/hbase/ipc/AbstractTestIPC.TestFailingRpcServer.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/ipc/AbstractTestIPC.TestFailingRpcServer.html
 
b/testdevapidocs/org/apache/hadoop/hbase/ipc/AbstractTestIPC.TestFailingRpcServer.html
index 05db8dd..1fb25f1 100644
--- 
a/testdevapidocs/org/apache/hadoop/hbase/ipc/AbstractTestIPC.TestFailingRpcServer.html
+++ 
b/testdevapidocs/org/apache/hadoop/hbase/ipc/AbstractTestIPC.TestFailingRpcServer.html
@@ -179,7 +179,7 @@ extends org.apache.hadoop.hbase.ipc.SimpleRpcServer
 
 
 Fields inherited from classorg.apache.hadoop.hbase.ipc.RpcServer
-allowFallbackToSimpleAuth, AUDITLOG, AUTH_FAILED_FOR, 
AUTH_SUCCESSFUL_FOR, authManager, authTokenSecretMgr, bindAddress, 
CALL_QUEUE_TOO_BIG_EXCEPTION, callQueueSizeInBytes, cellBlockBuilder, conf, 
CurCall, CURRENT_VERSION, DEFAULT_MAX_CALLQUEUE_LENGTH_PER_HANDLER, 
DEFAULT_MAX_CALLQUEUE_SIZE, DEFAULT_MAX_REQUEST_SIZE, 
DEFAULT_MIN_CLIENT_REQUEST_TIMEOUT, DEFAULT_WARN_RESPONSE_SIZE, 
DEFAULT_WARN_RESPONSE_TIME, errorHandler, FALLBACK_TO_INSECURE_CLIENT_AUTH, 
isSecurityEnabled, LOG, MAPPER, MAX_REQUEST_SIZE, maxQueueSizeInBytes, 
maxRequestSize, metrics, MIN_CLIENT_REQUEST_TIMEOUT, minClientRequestTimeout, 
minSizeForReservoirUse, MONITORED_RPC, NIO_BUFFER_LIMIT, 
REQUEST_TOO_BIG_EXCEPTION, reservoir, running, scheduler, secretManager, 
server, services, started, tcpKeepAlive, tcpNoDelay, userProvider, 
WARN_RESPONSE_SIZE, WARN_RESPONSE_TIME, warnResponseSize, 
warnResponseTime
+allowFallbackToSimpleAuth, AUDITLOG, AUTH_FAILED_FOR, 
AUTH_SUCCESSFUL_FOR, authManager, authTokenSecretMgr, bindAddress, 
CALL_QUEUE_TOO_BIG_EXCEPTION, callQueueSizeInBytes, cellBlockBuilder, conf, 
CurCall, CURRENT_VERSION, DEFAULT_MAX_CALLQUEUE_LENGTH_PER_HANDLER, 
DEFAULT_MAX_CALLQUEUE_SIZE, DEFAULT_MAX_REQUEST_SIZE, 
DEFAULT_MIN_CLIENT_REQUEST_TIMEOUT, DEFAULT_WARN_RESPONSE_SIZE, 
DEFAULT_WARN_RESPONSE_TIME, errorHandler, FALLBACK_TO_INSECURE_CLIENT_AUTH, 
isSecurityEnabled, LOG, MAPPER, MAX_REQUEST_SIZE, maxQueueSizeInBytes, 
maxRequestSize, metrics, MIN_CLIENT_REQUEST_TIMEOUT, minClientRequestTimeout, 
minSizeForReservoirUse, MONITORED_RPC, NIO_BUFFER_LIMIT, 
REQUEST_TOO_BIG_EXCEPTION, reservoir, running, saslProps, scheduler, 
secretManager, server, services, started, tcpKeepAlive, tcpNoDelay, 
userProvider, WARN_RESPONSE_SIZE, WARN_RESPONSE_TIME, warnResponseSize, 
warnResponseTime
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
--
diff --git a/testdevapidocs/org/apache/hadoop/hbase/package-tree.html 
b/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
index 7e488be..51fcfb8 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -542,14 +542,14 @@
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.ScanPerformanceEvaluation.ScanCounter
-org.apache.hadoop.hbase.RESTApiClusterManager.RoleCommand
 org.apache.hadoop.hbase.RESTApiClusterManager.Service
+org.apache.hadoop.hbase.PerformanceEvaluation.Counter
 org.apache.hadoop.hbase.ClusterManager.ServiceType
 org.apache.hadoop.hbase.HBaseClusterManager.CommandProvider.Operation
+org.apache.hadoop.hbase.RESTApiClusterManager.RoleCommand
+org.apache.hadoop.hbase.ResourceChecker.Phase
 org.apache.hadoop.hbase.IntegrationTestDDLMasterFailover.ACTION
-org.apache.hadoop.hbase.PerformanceEvaluation.Counter
 org.apache.hadoop.hbase.IntegrationTestRegionReplicaPerf.Stat
-org.apache.hadoop.hbase.ResourceChecker.Phase
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html 
b/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index ee59b2d..9c4f4b0 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -205,8 +205,8 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements 

[30/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
index 1d2b0a5..d31ce8b 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.html
@@ -118,7 +118,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class RSRpcServices
+public class RSRpcServices
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements HBaseRPCErrorHandler, 
org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService.BlockingInterface,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface,
 PriorityFunction, ConfigurationObserver
 Implements the regionserver RPC services.
@@ -191,96 +191,100 @@ implements clearCompactionQueues
 
 
+private com.google.common.cache.Cachehttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
+closedScanners
+
+
 private static long
 DEFAULT_REGION_SERVER_RPC_MINIMUM_SCAN_TIME_LIMIT_DELTA
 Default value of REGION_SERVER_RPC_MINIMUM_SCAN_TIME_LIMIT_DELTA
 
 
-
+
 (package private) http://docs.oracle.com/javase/8/docs/api/java/net/InetSocketAddress.html?is-external=true;
 title="class or interface in java.net">InetSocketAddress
 isa
 
-
+
 protected static 
org.apache.commons.logging.Log
 LOG
 
-
+
 private long
 maxScannerResultSize
 
-
+
 private long
 minimumScanTimeLimitDelta
 The minimum allowable delta to use for the scan limit
 
 
-
+
 private PriorityFunction
 priority
 
-
+
 private static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_RPC_MINIMUM_SCAN_TIME_LIMIT_DELTA
 Minimum allowable time limit delta (in milliseconds) that 
can be enforced during scans.
 
 
-
+
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS
 RPC scheduler to use for the region server.
 
 
-
+
 private HRegionServer
 regionServer
 
-
+
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder
 requestCount
 
-
+
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder
 rpcGetRequestCount
 
-
+
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder
 rpcMultiRequestCount
 
-
+
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder
 rpcMutateRequestCount
 
-
+
 (package private) http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/LongAdder.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">LongAdder
 rpcScanRequestCount
 
-
+
 (package private) RpcServerInterface
 rpcServer
 
-
+
 private int
 rpcTimeout
 The RPC timeout period (milliseconds)
 
 
-
+
 private static http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 SCANNER_ALREADY_CLOSED
 Deprecated.
 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/atomic/AtomicLong.html?is-external=true;
 title="class or interface in 
java.util.concurrent.atomic">AtomicLong
 scannerIdGen
 
-
+
 private int
 scannerLeaseTimeoutPeriod
 The lease timeout period for client scanners 
(milliseconds).
 
 
-
+
 private http://docs.oracle.com/javase/8/docs/api/java/util/concurrent/ConcurrentMap.html?is-external=true;
 title="class or interface in java.util.concurrent">ConcurrentMaphttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String,RSRpcServices.RegionScannerHolder
 scanners
 
@@ -854,7 +858,7 @@ implements 
 
 LOG
-protected static finalorg.apache.commons.logging.Log LOG
+protected static finalorg.apache.commons.logging.Log LOG
 
 
 
@@ -863,7 +867,7 @@ implements 
 
 REGION_SERVER_RPC_SCHEDULER_FACTORY_CLASS
-public static 

[25/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
index ac13492..ce4327f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.html
@@ -1101,284 +1101,280 @@
 1093
"hbase.regionserver.wal.enablecompression";
 1094
 1095  /** Configuration name of WAL storage 
policy
-1096   * Valid values are:
-1097   *  NONE: no preference in destination 
of block replicas
-1098   *  ONE_SSD: place only one block 
replica in SSD and the remaining in default storage
-1099   *  and ALL_SSD: place all block 
replicas on SSD
-1100   *
-1101   * See 
http://hadoop.apache.org/docs/r2.6.0/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html*/
-1102  public static final String 
WAL_STORAGE_POLICY = "hbase.wal.storage.policy";
-1103  public static final String 
DEFAULT_WAL_STORAGE_POLICY = "NONE";
+1096   * Valid values are: HOT, COLD, WARM, 
ALL_SSD, ONE_SSD, LAZY_PERSIST
+1097   * See 
http://hadoop.apache.org/docs/r2.7.3/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html*/
+1098  public static final String 
WAL_STORAGE_POLICY = "hbase.wal.storage.policy";
+1099  public static final String 
DEFAULT_WAL_STORAGE_POLICY = "HOT";
+1100
+1101  /** Region in Transition metrics 
threshold time */
+1102  public static final String 
METRICS_RIT_STUCK_WARNING_THRESHOLD =
+1103  
"hbase.metrics.rit.stuck.warning.threshold";
 1104
-1105  /** Region in Transition metrics 
threshold time */
-1106  public static final String 
METRICS_RIT_STUCK_WARNING_THRESHOLD =
-1107  
"hbase.metrics.rit.stuck.warning.threshold";
-1108
-1109  public static final String 
LOAD_BALANCER_SLOP_KEY = "hbase.regions.slop";
-1110
-  /** delimiter used between portions of 
a region name */
-1112  public static final int DELIMITER = 
',';
-1113
-1114  /**
-1115   * QOS attributes: these attributes 
are used to demarcate RPC call processing
-1116   * by different set of handlers. For 
example, HIGH_QOS tagged methods are
-1117   * handled by high priority 
handlers.
-1118   */
-1119  // normal_QOS  replication_QOS 
 replay_QOS  QOS_threshold  admin_QOS  high_QOS
-1120  public static final int NORMAL_QOS = 
0;
-1121  public static final int 
REPLICATION_QOS = 5;
-1122  public static final int REPLAY_QOS = 
6;
-1123  public static final int QOS_THRESHOLD 
= 10;
-1124  public static final int ADMIN_QOS = 
100;
-1125  public static final int HIGH_QOS = 
200;
-1126  public static final int 
SYSTEMTABLE_QOS = HIGH_QOS;
-1127
-1128  /** Directory under /hbase where 
archived hfiles are stored */
-1129  public static final String 
HFILE_ARCHIVE_DIRECTORY = "archive";
-1130
-1131  /**
-1132   * Name of the directory to store all 
snapshots. See SnapshotDescriptionUtils for
-1133   * remaining snapshot constants; this 
is here to keep HConstants dependencies at a minimum and
-1134   * uni-directional.
-1135   */
-1136  public static final String 
SNAPSHOT_DIR_NAME = ".hbase-snapshot";
-1137
-1138  /* Name of old snapshot directory. See 
HBASE-8352 for details on why it needs to be renamed */
-1139  public static final String 
OLD_SNAPSHOT_DIR_NAME = ".snapshot";
-1140
-1141  /** Temporary directory used for table 
creation and deletion */
-1142  public static final String 
HBASE_TEMP_DIRECTORY = ".tmp";
-1143  /**
-1144   * The period (in milliseconds) 
between computing region server point in time metrics
-1145   */
-1146  public static final String 
REGIONSERVER_METRICS_PERIOD = "hbase.regionserver.metrics.period";
-1147  public static final long 
DEFAULT_REGIONSERVER_METRICS_PERIOD = 5000;
-1148  /** Directories that are not HBase 
table directories */
-1149  public static final ListString 
HBASE_NON_TABLE_DIRS =
-1150
Collections.unmodifiableList(Arrays.asList(new String[] {
-1151  HBCK_SIDELINEDIR_NAME, 
HBASE_TEMP_DIRECTORY, MIGRATION_NAME
-1152}));
-1153
-1154  /** Directories that are not HBase 
user table directories */
-1155  public static final ListString 
HBASE_NON_USER_TABLE_DIRS =
-1156
Collections.unmodifiableList(Arrays.asList((String[])ArrayUtils.addAll(
-1157  new String[] { 
TableName.META_TABLE_NAME.getNameAsString() },
-1158  
HBASE_NON_TABLE_DIRS.toArray(;
-1159
-1160  /** Health script related settings. 
*/
-1161  public static final String 
HEALTH_SCRIPT_LOC = "hbase.node.health.script.location";
-1162  public static final String 
HEALTH_SCRIPT_TIMEOUT = "hbase.node.health.script.timeout";
-1163  public static final String 
HEALTH_CHORE_WAKE_FREQ =
-1164  
"hbase.node.health.script.frequency";
-1165  public static final long 
DEFAULT_HEALTH_SCRIPT_TIMEOUT = 6;
-1166  /**
-1167   * The maximum number of health check 
failures a server can encounter 

[09/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/hbase-annotations/issue-tracking.html
--
diff --git a/hbase-annotations/issue-tracking.html 
b/hbase-annotations/issue-tracking.html
index 0f49c95..8b943d3 100644
--- a/hbase-annotations/issue-tracking.html
+++ b/hbase-annotations/issue-tracking.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-05-26
+Last Published: 2017-05-27
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/hbase-annotations/license.html
--
diff --git a/hbase-annotations/license.html b/hbase-annotations/license.html
index e3d1755..52f3cd7 100644
--- a/hbase-annotations/license.html
+++ b/hbase-annotations/license.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-05-26
+Last Published: 2017-05-27
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/hbase-annotations/mail-lists.html
--
diff --git a/hbase-annotations/mail-lists.html 
b/hbase-annotations/mail-lists.html
index 3a48963..7c4 100644
--- a/hbase-annotations/mail-lists.html
+++ b/hbase-annotations/mail-lists.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-05-26
+Last Published: 2017-05-27
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/hbase-annotations/plugin-management.html
--
diff --git a/hbase-annotations/plugin-management.html 
b/hbase-annotations/plugin-management.html
index a6ed7b5..a251ac3 100644
--- a/hbase-annotations/plugin-management.html
+++ b/hbase-annotations/plugin-management.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-05-26
+Last Published: 2017-05-27
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/hbase-annotations/plugins.html
--
diff --git a/hbase-annotations/plugins.html b/hbase-annotations/plugins.html
index 11a8eb1..1676a2d 100644
--- a/hbase-annotations/plugins.html
+++ b/hbase-annotations/plugins.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-05-26
+Last Published: 2017-05-27
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Annotations

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/hbase-annotations/project-info.html
--
diff --git a/hbase-annotations/project-info.html 
b/hbase-annotations/project-info.html
index d04d166..0473542 100644
--- a/hbase-annotations/project-info.html
+++ b/hbase-annotations/project-info.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 

[21/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
index b363f06..790089d 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/ServerRpcConnection.ByteBuffByteInput.html
@@ -37,854 +37,810 @@
 029import java.nio.channels.Channels;
 030import 
java.nio.channels.ReadableByteChannel;
 031import 
java.security.GeneralSecurityException;
-032import 
java.security.PrivilegedExceptionAction;
-033import java.util.Properties;
-034
-035import javax.security.sasl.Sasl;
-036import 
javax.security.sasl.SaslException;
-037import javax.security.sasl.SaslServer;
-038
-039import 
org.apache.commons.crypto.cipher.CryptoCipherFactory;
-040import 
org.apache.commons.crypto.random.CryptoRandom;
-041import 
org.apache.commons.crypto.random.CryptoRandomFactory;
-042import 
org.apache.hadoop.hbase.CellScanner;
-043import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-044import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-045import 
org.apache.hadoop.hbase.codec.Codec;
-046import 
org.apache.hadoop.hbase.io.ByteBufferOutputStream;
-047import 
org.apache.hadoop.hbase.io.crypto.aes.CryptoAES;
-048import 
org.apache.hadoop.hbase.ipc.RpcServer.CallCleanup;
-049import 
org.apache.hadoop.hbase.nio.ByteBuff;
-050import 
org.apache.hadoop.hbase.nio.SingleByteBuff;
-051import 
org.apache.hadoop.hbase.security.AccessDeniedException;
-052import 
org.apache.hadoop.hbase.security.AuthMethod;
-053import 
org.apache.hadoop.hbase.security.HBaseSaslRpcServer;
-054import 
org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslDigestCallbackHandler;
-055import 
org.apache.hadoop.hbase.security.HBaseSaslRpcServer.SaslGssCallbackHandler;
-056import 
org.apache.hadoop.hbase.security.SaslStatus;
-057import 
org.apache.hadoop.hbase.security.SaslUtil;
-058import 
org.apache.hadoop.hbase.security.User;
-059import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
-060import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteInput;
-061import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream;
-063import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
-064import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-065import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-066import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.VersionInfo;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos;
-070import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
-071import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
-072import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ResponseHeader;
-073import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.UserInformation;
-074import 
org.apache.hadoop.hbase.util.Bytes;
-075import 
org.apache.hadoop.io.BytesWritable;
-076import 
org.apache.hadoop.io.IntWritable;
-077import org.apache.hadoop.io.Writable;
-078import 
org.apache.hadoop.io.WritableUtils;
-079import 
org.apache.hadoop.io.compress.CompressionCodec;
-080import 
org.apache.hadoop.security.UserGroupInformation;
-081import 
org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-082import 
org.apache.hadoop.security.authorize.AuthorizationException;
-083import 
org.apache.hadoop.security.authorize.ProxyUsers;
-084import 
org.apache.hadoop.security.token.SecretManager.InvalidToken;
-085import 
org.apache.hadoop.security.token.TokenIdentifier;
-086import org.apache.htrace.TraceInfo;
-087
-088/** Reads calls from a connection and 
queues them for handling. */
-089@edu.umd.cs.findbugs.annotations.SuppressWarnings(
-090value="VO_VOLATILE_INCREMENT",
-091justification="False positive 
according to http://sourceforge.net/p/findbugs/bugs/1032/;)
-092abstract class ServerRpcConnection 
implements Closeable {
-093  /**  */
-094  protected final RpcServer rpcServer;
-095  // If the connection header has been 
read or not.
-096  protected boolean connectionHeaderRead 
= false;
-097
-098  protected CallCleanup callCleanup;
-099
-100  // Cache the remote host  port 
info so that even if the socket is
-101  // disconnected, we can say where it 
used to connect to.
-102  protected String hostAddress;
-103  protected int remotePort;
-104  

[34/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
index 57a0fd0..8f6ff1c 100644
--- a/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/client/package-tree.html
@@ -524,24 +524,24 @@
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.client.IsolationLevel
-org.apache.hadoop.hbase.client.ScannerCallable.MoreResults
-org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState
+org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState
+org.apache.hadoop.hbase.client.Consistency
+org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.Retry
 org.apache.hadoop.hbase.client.SnapshotType
-org.apache.hadoop.hbase.client.TableState.State
-org.apache.hadoop.hbase.client.RegionLocateType
-org.apache.hadoop.hbase.client.MasterSwitchType
+org.apache.hadoop.hbase.client.CompactionState
 org.apache.hadoop.hbase.client.HBaseAdmin.ReplicationState
-org.apache.hadoop.hbase.client.Scan.ReadType
-org.apache.hadoop.hbase.client.Consistency
+org.apache.hadoop.hbase.client.AbstractResponse.ResponseType
+org.apache.hadoop.hbase.client.RequestController.ReturnCode
+org.apache.hadoop.hbase.client.ScannerCallable.MoreResults
 org.apache.hadoop.hbase.client.Durability
+org.apache.hadoop.hbase.client.TableState.State
 org.apache.hadoop.hbase.client.MobCompactPartitionPolicy
+org.apache.hadoop.hbase.client.RegionLocateType
+org.apache.hadoop.hbase.client.Scan.ReadType
+org.apache.hadoop.hbase.client.MasterSwitchType
 org.apache.hadoop.hbase.client.CompactType
-org.apache.hadoop.hbase.client.AsyncRequestFutureImpl.Retry
-org.apache.hadoop.hbase.client.AbstractResponse.ResponseType
-org.apache.hadoop.hbase.client.CompactionState
-org.apache.hadoop.hbase.client.RequestController.ReturnCode
 org.apache.hadoop.hbase.client.AsyncProcessTask.SubmittedRows
-org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanResumerState
+org.apache.hadoop.hbase.client.AsyncScanSingleRegionRpcRetryingCaller.ScanControllerState
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
index 0723bf0..bda44b6 100644
--- a/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/filter/package-tree.html
@@ -176,12 +176,12 @@
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.filter.FuzzyRowFilter.Order
-org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
 org.apache.hadoop.hbase.filter.Filter.ReturnCode
-org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
+org.apache.hadoop.hbase.filter.BitComparator.BitwiseOp
 org.apache.hadoop.hbase.filter.FilterList.Operator
 org.apache.hadoop.hbase.filter.CompareFilter.CompareOp
 org.apache.hadoop.hbase.filter.FilterWrapper.FilterRowRetCode
+org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType
 org.apache.hadoop.hbase.filter.FuzzyRowFilter.SatisfiesCode
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
index d7edef5..52bb8aa 100644
--- a/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/io/hfile/package-tree.html
@@ -273,12 +273,12 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements 

[11/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
index 9307cf8..58df534 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.ImplData.html
@@ -70,14 +70,14 @@
 062@org.jamon.annotations.Argument(name 
= "master", type = "HMaster")},
 063  optionalArguments = {
 064@org.jamon.annotations.Argument(name 
= "format", type = "String"),
-065@org.jamon.annotations.Argument(name 
= "frags", type = "MapString,Integer"),
-066@org.jamon.annotations.Argument(name 
= "deadServers", type = "SetServerName"),
-067@org.jamon.annotations.Argument(name 
= "serverManager", type = "ServerManager"),
-068@org.jamon.annotations.Argument(name 
= "assignmentManager", type = "AssignmentManager"),
-069@org.jamon.annotations.Argument(name 
= "servers", type = "ListServerName"),
-070@org.jamon.annotations.Argument(name 
= "filter", type = "String"),
-071@org.jamon.annotations.Argument(name 
= "metaLocation", type = "ServerName"),
-072@org.jamon.annotations.Argument(name 
= "catalogJanitorEnabled", type = "boolean")})
+065@org.jamon.annotations.Argument(name 
= "serverManager", type = "ServerManager"),
+066@org.jamon.annotations.Argument(name 
= "filter", type = "String"),
+067@org.jamon.annotations.Argument(name 
= "assignmentManager", type = "AssignmentManager"),
+068@org.jamon.annotations.Argument(name 
= "frags", type = "MapString,Integer"),
+069@org.jamon.annotations.Argument(name 
= "deadServers", type = "SetServerName"),
+070@org.jamon.annotations.Argument(name 
= "metaLocation", type = "ServerName"),
+071@org.jamon.annotations.Argument(name 
= "catalogJanitorEnabled", type = "boolean"),
+072@org.jamon.annotations.Argument(name 
= "servers", type = "ListServerName")})
 073public class MasterStatusTmpl
 074  extends 
org.jamon.AbstractTemplateProxy
 075{
@@ -135,142 +135,142 @@
 127  return m_format__IsNotDefault;
 128}
 129private boolean 
m_format__IsNotDefault;
-130// 21, 1
-131public void 
setFrags(MapString,Integer frags)
+130// 28, 1
+131public void 
setServerManager(ServerManager serverManager)
 132{
-133  // 21, 1
-134  m_frags = frags;
-135  m_frags__IsNotDefault = true;
+133  // 28, 1
+134  m_serverManager = serverManager;
+135  m_serverManager__IsNotDefault = 
true;
 136}
-137public MapString,Integer 
getFrags()
+137public ServerManager 
getServerManager()
 138{
-139  return m_frags;
+139  return m_serverManager;
 140}
-141private MapString,Integer 
m_frags;
-142public boolean 
getFrags__IsNotDefault()
+141private ServerManager 
m_serverManager;
+142public boolean 
getServerManager__IsNotDefault()
 143{
-144  return m_frags__IsNotDefault;
+144  return 
m_serverManager__IsNotDefault;
 145}
-146private boolean 
m_frags__IsNotDefault;
-147// 24, 1
-148public void 
setDeadServers(SetServerName deadServers)
+146private boolean 
m_serverManager__IsNotDefault;
+147// 26, 1
+148public void setFilter(String 
filter)
 149{
-150  // 24, 1
-151  m_deadServers = deadServers;
-152  m_deadServers__IsNotDefault = 
true;
+150  // 26, 1
+151  m_filter = filter;
+152  m_filter__IsNotDefault = true;
 153}
-154public SetServerName 
getDeadServers()
+154public String getFilter()
 155{
-156  return m_deadServers;
+156  return m_filter;
 157}
-158private SetServerName 
m_deadServers;
-159public boolean 
getDeadServers__IsNotDefault()
+158private String m_filter;
+159public boolean 
getFilter__IsNotDefault()
 160{
-161  return 
m_deadServers__IsNotDefault;
+161  return m_filter__IsNotDefault;
 162}
-163private boolean 
m_deadServers__IsNotDefault;
-164// 28, 1
-165public void 
setServerManager(ServerManager serverManager)
+163private boolean 
m_filter__IsNotDefault;
+164// 29, 1
+165public void 
setAssignmentManager(AssignmentManager assignmentManager)
 166{
-167  // 28, 1
-168  m_serverManager = serverManager;
-169  m_serverManager__IsNotDefault = 
true;
+167  // 29, 1
+168  m_assignmentManager = 
assignmentManager;
+169  m_assignmentManager__IsNotDefault = 
true;
 170}
-171public ServerManager 
getServerManager()
+171public AssignmentManager 
getAssignmentManager()
 172{
-173  return m_serverManager;
+173  return m_assignmentManager;
 174}
-175private ServerManager 

[29/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
index c3b12a1..1d04df1 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/package-tree.html
@@ -685,20 +685,20 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.regionserver.ImmutableSegment.Type
-org.apache.hadoop.hbase.regionserver.ScannerContext.NextState
-org.apache.hadoop.hbase.regionserver.StoreScanner.StoreScannerCompactionRace
-org.apache.hadoop.hbase.regionserver.Region.FlushResult.Result
 org.apache.hadoop.hbase.regionserver.BloomType
-org.apache.hadoop.hbase.regionserver.RegionOpeningState
-org.apache.hadoop.hbase.regionserver.FlushType
-org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
+org.apache.hadoop.hbase.regionserver.ScanType
 org.apache.hadoop.hbase.regionserver.Region.Operation
+org.apache.hadoop.hbase.regionserver.RegionOpeningState
+org.apache.hadoop.hbase.regionserver.Region.FlushResult.Result
+org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
 org.apache.hadoop.hbase.regionserver.MetricsRegionServerSourceFactoryImpl.FactoryStorage
+org.apache.hadoop.hbase.regionserver.StoreScanner.StoreScannerCompactionRace
+org.apache.hadoop.hbase.regionserver.SplitLogWorker.TaskExecutor.Status
+org.apache.hadoop.hbase.regionserver.ScannerContext.NextState
 org.apache.hadoop.hbase.regionserver.MemStoreCompactor.Action
-org.apache.hadoop.hbase.regionserver.ScanType
+org.apache.hadoop.hbase.regionserver.ImmutableSegment.Type
+org.apache.hadoop.hbase.regionserver.FlushType
 org.apache.hadoop.hbase.regionserver.DefaultHeapMemoryTuner.StepDirection
-org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
 
b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
index 7c07d66..a700851 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/regionserver/querymatcher/package-tree.html
@@ -125,10 +125,10 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.regionserver.querymatcher.StripeCompactionScanQueryMatcher.DropDeletesInOutput
+org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult
 org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteCompare
 org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode
-org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult
+org.apache.hadoop.hbase.regionserver.querymatcher.StripeCompactionScanQueryMatcher.DropDeletesInOutput
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
index 320500a..326107e 100644
--- a/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/regionserver/wal/package-tree.html
@@ -248,8 +248,8 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 

[22/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html
index be47cd3..5bf8c01 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/ipc/RpcServer.html
@@ -38,763 +38,770 @@
 030import 
java.nio.channels.ReadableByteChannel;
 031import 
java.nio.channels.WritableByteChannel;
 032import java.util.ArrayList;
-033import java.util.HashMap;
-034import java.util.List;
-035import java.util.Map;
-036import 
java.util.concurrent.atomic.LongAdder;
-037
-038import org.apache.commons.logging.Log;
-039import 
org.apache.commons.logging.LogFactory;
-040import 
org.apache.hadoop.conf.Configuration;
-041import 
org.apache.hadoop.hbase.CallQueueTooBigException;
-042import 
org.apache.hadoop.hbase.CellScanner;
-043import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-044import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
-045import 
org.apache.hadoop.hbase.HConstants;
-046import org.apache.hadoop.hbase.Server;
-047import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-048import 
org.apache.hadoop.hbase.classification.InterfaceStability;
-049import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-050import 
org.apache.hadoop.hbase.exceptions.RequestTooBigException;
-051import 
org.apache.hadoop.hbase.io.ByteBufferPool;
-052import 
org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
-053import 
org.apache.hadoop.hbase.monitoring.TaskMonitor;
-054import 
org.apache.hadoop.hbase.nio.ByteBuff;
-055import 
org.apache.hadoop.hbase.nio.MultiByteBuff;
-056import 
org.apache.hadoop.hbase.nio.SingleByteBuff;
-057import 
org.apache.hadoop.hbase.regionserver.RSRpcServices;
-058import 
org.apache.hadoop.hbase.security.HBaseSaslRpcServer;
-059import 
org.apache.hadoop.hbase.security.User;
-060import 
org.apache.hadoop.hbase.security.UserProvider;
-061import 
org.apache.hadoop.hbase.security.token.AuthenticationTokenSecretManager;
-062import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;
-063import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
-064import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
-065import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
-066import 
org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
-067import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-068import 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-069import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.ConnectionHeader;
-070import 
org.apache.hadoop.hbase.util.Pair;
-071import 
org.apache.hadoop.security.UserGroupInformation;
-072import 
org.apache.hadoop.security.authorize.AuthorizationException;
-073import 
org.apache.hadoop.security.authorize.PolicyProvider;
-074import 
org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
-075import 
org.apache.hadoop.security.token.SecretManager;
-076import 
org.apache.hadoop.security.token.TokenIdentifier;
-077import 
org.codehaus.jackson.map.ObjectMapper;
-078
-079/**
-080 * An RPC server that hosts protobuf 
described Services.
-081 *
-082 */
-083@InterfaceAudience.LimitedPrivate({HBaseInterfaceAudience.COPROC,
 HBaseInterfaceAudience.PHOENIX})
-084@InterfaceStability.Evolving
-085public abstract class RpcServer 
implements RpcServerInterface,
-086ConfigurationObserver {
-087  // LOG is being used in CallRunner and 
the log level is being changed in tests
-088  public static final Log LOG = 
LogFactory.getLog(RpcServer.class);
-089  protected static final 
CallQueueTooBigException CALL_QUEUE_TOO_BIG_EXCEPTION
-090  = new CallQueueTooBigException();
-091
-092  private final boolean authorize;
-093  protected boolean isSecurityEnabled;
+033import java.util.Collections;
+034import java.util.HashMap;
+035import java.util.List;
+036import java.util.Locale;
+037import java.util.Map;
+038import 
java.util.concurrent.atomic.LongAdder;
+039
+040import org.apache.commons.logging.Log;
+041import 
org.apache.commons.logging.LogFactory;
+042import 
org.apache.hadoop.conf.Configuration;
+043import 
org.apache.hadoop.hbase.CallQueueTooBigException;
+044import 
org.apache.hadoop.hbase.CellScanner;
+045import 
org.apache.hadoop.hbase.DoNotRetryIOException;
+046import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
+047import 
org.apache.hadoop.hbase.HConstants;
+048import org.apache.hadoop.hbase.Server;
+049import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
+050import 
org.apache.hadoop.hbase.classification.InterfaceStability;
+051import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
+052import 
org.apache.hadoop.hbase.exceptions.RequestTooBigException;
+053import 

[27/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.Modify.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.Modify.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.Modify.html
index ac13492..ce4327f 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.Modify.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.Modify.html
@@ -1101,284 +1101,280 @@
 1093
"hbase.regionserver.wal.enablecompression";
 1094
 1095  /** Configuration name of WAL storage 
policy
-1096   * Valid values are:
-1097   *  NONE: no preference in destination 
of block replicas
-1098   *  ONE_SSD: place only one block 
replica in SSD and the remaining in default storage
-1099   *  and ALL_SSD: place all block 
replicas on SSD
-1100   *
-1101   * See 
http://hadoop.apache.org/docs/r2.6.0/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html*/
-1102  public static final String 
WAL_STORAGE_POLICY = "hbase.wal.storage.policy";
-1103  public static final String 
DEFAULT_WAL_STORAGE_POLICY = "NONE";
+1096   * Valid values are: HOT, COLD, WARM, 
ALL_SSD, ONE_SSD, LAZY_PERSIST
+1097   * See 
http://hadoop.apache.org/docs/r2.7.3/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html*/
+1098  public static final String 
WAL_STORAGE_POLICY = "hbase.wal.storage.policy";
+1099  public static final String 
DEFAULT_WAL_STORAGE_POLICY = "HOT";
+1100
+1101  /** Region in Transition metrics 
threshold time */
+1102  public static final String 
METRICS_RIT_STUCK_WARNING_THRESHOLD =
+1103  
"hbase.metrics.rit.stuck.warning.threshold";
 1104
-1105  /** Region in Transition metrics 
threshold time */
-1106  public static final String 
METRICS_RIT_STUCK_WARNING_THRESHOLD =
-1107  
"hbase.metrics.rit.stuck.warning.threshold";
-1108
-1109  public static final String 
LOAD_BALANCER_SLOP_KEY = "hbase.regions.slop";
-1110
-  /** delimiter used between portions of 
a region name */
-1112  public static final int DELIMITER = 
',';
-1113
-1114  /**
-1115   * QOS attributes: these attributes 
are used to demarcate RPC call processing
-1116   * by different set of handlers. For 
example, HIGH_QOS tagged methods are
-1117   * handled by high priority 
handlers.
-1118   */
-1119  // normal_QOS  replication_QOS 
 replay_QOS  QOS_threshold  admin_QOS  high_QOS
-1120  public static final int NORMAL_QOS = 
0;
-1121  public static final int 
REPLICATION_QOS = 5;
-1122  public static final int REPLAY_QOS = 
6;
-1123  public static final int QOS_THRESHOLD 
= 10;
-1124  public static final int ADMIN_QOS = 
100;
-1125  public static final int HIGH_QOS = 
200;
-1126  public static final int 
SYSTEMTABLE_QOS = HIGH_QOS;
-1127
-1128  /** Directory under /hbase where 
archived hfiles are stored */
-1129  public static final String 
HFILE_ARCHIVE_DIRECTORY = "archive";
-1130
-1131  /**
-1132   * Name of the directory to store all 
snapshots. See SnapshotDescriptionUtils for
-1133   * remaining snapshot constants; this 
is here to keep HConstants dependencies at a minimum and
-1134   * uni-directional.
-1135   */
-1136  public static final String 
SNAPSHOT_DIR_NAME = ".hbase-snapshot";
-1137
-1138  /* Name of old snapshot directory. See 
HBASE-8352 for details on why it needs to be renamed */
-1139  public static final String 
OLD_SNAPSHOT_DIR_NAME = ".snapshot";
-1140
-1141  /** Temporary directory used for table 
creation and deletion */
-1142  public static final String 
HBASE_TEMP_DIRECTORY = ".tmp";
-1143  /**
-1144   * The period (in milliseconds) 
between computing region server point in time metrics
-1145   */
-1146  public static final String 
REGIONSERVER_METRICS_PERIOD = "hbase.regionserver.metrics.period";
-1147  public static final long 
DEFAULT_REGIONSERVER_METRICS_PERIOD = 5000;
-1148  /** Directories that are not HBase 
table directories */
-1149  public static final ListString 
HBASE_NON_TABLE_DIRS =
-1150
Collections.unmodifiableList(Arrays.asList(new String[] {
-1151  HBCK_SIDELINEDIR_NAME, 
HBASE_TEMP_DIRECTORY, MIGRATION_NAME
-1152}));
-1153
-1154  /** Directories that are not HBase 
user table directories */
-1155  public static final ListString 
HBASE_NON_USER_TABLE_DIRS =
-1156
Collections.unmodifiableList(Arrays.asList((String[])ArrayUtils.addAll(
-1157  new String[] { 
TableName.META_TABLE_NAME.getNameAsString() },
-1158  
HBASE_NON_TABLE_DIRS.toArray(;
-1159
-1160  /** Health script related settings. 
*/
-1161  public static final String 
HEALTH_SCRIPT_LOC = "hbase.node.health.script.location";
-1162  public static final String 
HEALTH_SCRIPT_TIMEOUT = "hbase.node.health.script.timeout";
-1163  public static final String 
HEALTH_CHORE_WAKE_FREQ =
-1164  
"hbase.node.health.script.frequency";
-1165  public static final long 
DEFAULT_HEALTH_SCRIPT_TIMEOUT = 6;
-1166  /**
-1167   * The maximum number of health 

[17/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
index 7aeb0fb..f01cf3a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerShippedCallBack.html
@@ -27,3384 +27,3404 @@
 019package 
org.apache.hadoop.hbase.regionserver;
 020
 021import 
com.google.common.annotations.VisibleForTesting;
-022
-023import java.io.FileNotFoundException;
-024import java.io.IOException;
-025import java.io.InterruptedIOException;
-026import java.net.BindException;
-027import java.net.InetSocketAddress;
-028import java.net.UnknownHostException;
-029import java.nio.ByteBuffer;
-030import java.util.*;
-031import java.util.Map.Entry;
-032import 
java.util.concurrent.ConcurrentHashMap;
-033import 
java.util.concurrent.ConcurrentMap;
-034import 
java.util.concurrent.atomic.AtomicBoolean;
-035import 
java.util.concurrent.atomic.AtomicLong;
-036import 
java.util.concurrent.atomic.LongAdder;
-037
-038import 
org.apache.commons.lang.mutable.MutableObject;
-039import org.apache.commons.logging.Log;
-040import 
org.apache.commons.logging.LogFactory;
-041import 
org.apache.hadoop.conf.Configuration;
-042import org.apache.hadoop.fs.Path;
-043import 
org.apache.hadoop.hbase.ByteBufferCell;
-044import org.apache.hadoop.hbase.Cell;
-045import 
org.apache.hadoop.hbase.CellScannable;
-046import 
org.apache.hadoop.hbase.CellScanner;
-047import 
org.apache.hadoop.hbase.CellUtil;
-048import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-049import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-050import 
org.apache.hadoop.hbase.HBaseIOException;
-051import 
org.apache.hadoop.hbase.HConstants;
-052import 
org.apache.hadoop.hbase.HRegionInfo;
-053import 
org.apache.hadoop.hbase.HTableDescriptor;
-054import 
org.apache.hadoop.hbase.MultiActionResultTooLarge;
-055import 
org.apache.hadoop.hbase.NotServingRegionException;
-056import 
org.apache.hadoop.hbase.ServerName;
-057import 
org.apache.hadoop.hbase.TableName;
-058import 
org.apache.hadoop.hbase.UnknownScannerException;
-059import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-060import 
org.apache.hadoop.hbase.client.Append;
-061import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-062import 
org.apache.hadoop.hbase.client.Delete;
-063import 
org.apache.hadoop.hbase.client.Durability;
-064import 
org.apache.hadoop.hbase.client.Get;
-065import 
org.apache.hadoop.hbase.client.Increment;
-066import 
org.apache.hadoop.hbase.client.Mutation;
-067import 
org.apache.hadoop.hbase.client.Put;
-068import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-069import 
org.apache.hadoop.hbase.client.Result;
-070import 
org.apache.hadoop.hbase.client.RowMutations;
-071import 
org.apache.hadoop.hbase.client.Scan;
-072import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-073import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-074import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-075import 
org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-076import 
org.apache.hadoop.hbase.exceptions.ScannerResetException;
-077import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-078import 
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-079import 
org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
-080import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-081import 
org.apache.hadoop.hbase.ipc.PriorityFunction;
-082import 
org.apache.hadoop.hbase.ipc.QosPriority;
-083import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-084import 
org.apache.hadoop.hbase.ipc.RpcCallback;
-085import 
org.apache.hadoop.hbase.ipc.RpcServer;
-086import 
org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-087import 
org.apache.hadoop.hbase.ipc.RpcServerFactory;
-088import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-089import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-090import 
org.apache.hadoop.hbase.ipc.ServerRpcController;
-091import 
org.apache.hadoop.hbase.master.MasterRpcServices;
-092import 
org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;
-093import 
org.apache.hadoop.hbase.quotas.OperationQuota;
-094import 
org.apache.hadoop.hbase.quotas.QuotaUtil;
-095import 
org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
-096import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-097import 
org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;
-098import 
org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
-099import 

[16/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
index 7aeb0fb..f01cf3a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannersCloseCallBack.html
@@ -27,3384 +27,3404 @@
 019package 
org.apache.hadoop.hbase.regionserver;
 020
 021import 
com.google.common.annotations.VisibleForTesting;
-022
-023import java.io.FileNotFoundException;
-024import java.io.IOException;
-025import java.io.InterruptedIOException;
-026import java.net.BindException;
-027import java.net.InetSocketAddress;
-028import java.net.UnknownHostException;
-029import java.nio.ByteBuffer;
-030import java.util.*;
-031import java.util.Map.Entry;
-032import 
java.util.concurrent.ConcurrentHashMap;
-033import 
java.util.concurrent.ConcurrentMap;
-034import 
java.util.concurrent.atomic.AtomicBoolean;
-035import 
java.util.concurrent.atomic.AtomicLong;
-036import 
java.util.concurrent.atomic.LongAdder;
-037
-038import 
org.apache.commons.lang.mutable.MutableObject;
-039import org.apache.commons.logging.Log;
-040import 
org.apache.commons.logging.LogFactory;
-041import 
org.apache.hadoop.conf.Configuration;
-042import org.apache.hadoop.fs.Path;
-043import 
org.apache.hadoop.hbase.ByteBufferCell;
-044import org.apache.hadoop.hbase.Cell;
-045import 
org.apache.hadoop.hbase.CellScannable;
-046import 
org.apache.hadoop.hbase.CellScanner;
-047import 
org.apache.hadoop.hbase.CellUtil;
-048import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-049import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-050import 
org.apache.hadoop.hbase.HBaseIOException;
-051import 
org.apache.hadoop.hbase.HConstants;
-052import 
org.apache.hadoop.hbase.HRegionInfo;
-053import 
org.apache.hadoop.hbase.HTableDescriptor;
-054import 
org.apache.hadoop.hbase.MultiActionResultTooLarge;
-055import 
org.apache.hadoop.hbase.NotServingRegionException;
-056import 
org.apache.hadoop.hbase.ServerName;
-057import 
org.apache.hadoop.hbase.TableName;
-058import 
org.apache.hadoop.hbase.UnknownScannerException;
-059import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-060import 
org.apache.hadoop.hbase.client.Append;
-061import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-062import 
org.apache.hadoop.hbase.client.Delete;
-063import 
org.apache.hadoop.hbase.client.Durability;
-064import 
org.apache.hadoop.hbase.client.Get;
-065import 
org.apache.hadoop.hbase.client.Increment;
-066import 
org.apache.hadoop.hbase.client.Mutation;
-067import 
org.apache.hadoop.hbase.client.Put;
-068import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-069import 
org.apache.hadoop.hbase.client.Result;
-070import 
org.apache.hadoop.hbase.client.RowMutations;
-071import 
org.apache.hadoop.hbase.client.Scan;
-072import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-073import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-074import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-075import 
org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-076import 
org.apache.hadoop.hbase.exceptions.ScannerResetException;
-077import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-078import 
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-079import 
org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
-080import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-081import 
org.apache.hadoop.hbase.ipc.PriorityFunction;
-082import 
org.apache.hadoop.hbase.ipc.QosPriority;
-083import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-084import 
org.apache.hadoop.hbase.ipc.RpcCallback;
-085import 
org.apache.hadoop.hbase.ipc.RpcServer;
-086import 
org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-087import 
org.apache.hadoop.hbase.ipc.RpcServerFactory;
-088import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-089import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-090import 
org.apache.hadoop.hbase.ipc.ServerRpcController;
-091import 
org.apache.hadoop.hbase.master.MasterRpcServices;
-092import 
org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;
-093import 
org.apache.hadoop.hbase.quotas.OperationQuota;
-094import 
org.apache.hadoop.hbase.quotas.QuotaUtil;
-095import 
org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
-096import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-097import 
org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;
-098import 
org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
-099import 

[31/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/package-tree.html
index bf111c5..374286c 100644
--- a/devapidocs/org/apache/hadoop/hbase/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/package-tree.html
@@ -422,15 +422,15 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.MetaTableAccessor.QueryType
-org.apache.hadoop.hbase.HConstants.Modify
 org.apache.hadoop.hbase.HConstants.OperationStatusCode
-org.apache.hadoop.hbase.ProcedureState
-org.apache.hadoop.hbase.HealthChecker.HealthCheckerExitStatus
-org.apache.hadoop.hbase.MemoryCompactionPolicy
 org.apache.hadoop.hbase.Coprocessor.State
 org.apache.hadoop.hbase.KeepDeletedCells
+org.apache.hadoop.hbase.MemoryCompactionPolicy
+org.apache.hadoop.hbase.HConstants.Modify
 org.apache.hadoop.hbase.CompatibilitySingletonFactory.SingletonStorage
+org.apache.hadoop.hbase.MetaTableAccessor.QueryType
+org.apache.hadoop.hbase.HealthChecker.HealthCheckerExitStatus
+org.apache.hadoop.hbase.ProcedureState
 org.apache.hadoop.hbase.KeyValue.Type
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
index 5938587..6db1eb9 100644
--- a/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/procedure2/package-tree.html
@@ -191,11 +191,11 @@
 
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
-org.apache.hadoop.hbase.procedure2.LockInfo.LockType
-org.apache.hadoop.hbase.procedure2.RootProcedureState.State
 org.apache.hadoop.hbase.procedure2.StateMachineProcedure.Flow
-org.apache.hadoop.hbase.procedure2.LockInfo.ResourceType
+org.apache.hadoop.hbase.procedure2.RootProcedureState.State
 org.apache.hadoop.hbase.procedure2.Procedure.LockState
+org.apache.hadoop.hbase.procedure2.LockInfo.LockType
+org.apache.hadoop.hbase.procedure2.LockInfo.ResourceType
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html 
b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
index 5421ba1..48f0943 100644
--- a/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
+++ b/devapidocs/org/apache/hadoop/hbase/quotas/package-tree.html
@@ -199,12 +199,12 @@
 java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Enum.html?is-external=true;
 title="class or interface in java.lang">EnumE (implements java.lang.http://docs.oracle.com/javase/8/docs/api/java/lang/Comparable.html?is-external=true;
 title="class or interface in java.lang">ComparableT, java.io.http://docs.oracle.com/javase/8/docs/api/java/io/Serializable.html?is-external=true;
 title="class or interface in java.io">Serializable)
 
 org.apache.hadoop.hbase.quotas.QuotaScope
-org.apache.hadoop.hbase.quotas.QuotaSnapshotStore.ViolationState
-org.apache.hadoop.hbase.quotas.ThrottleType
 org.apache.hadoop.hbase.quotas.ThrottlingException.Type
-org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
-org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
 org.apache.hadoop.hbase.quotas.QuotaType
+org.apache.hadoop.hbase.quotas.ThrottleType
+org.apache.hadoop.hbase.quotas.SpaceViolationPolicy
+org.apache.hadoop.hbase.quotas.OperationQuota.OperationType
+org.apache.hadoop.hbase.quotas.QuotaSnapshotStore.ViolationState
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
--
diff --git 

[37/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index 64b7d66..50a7ffe 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -289,7 +289,7 @@
 2203
 0
 0
-14460
+14436
 
 Files
 
@@ -2547,7 +2547,7 @@
 org/apache/hadoop/hbase/ipc/ServerRpcConnection.java
 0
 0
-42
+22
 
 org/apache/hadoop/hbase/ipc/SimpleRpcScheduler.java
 0
@@ -4302,7 +4302,7 @@
 org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 0
 0
-124
+122
 
 org/apache/hadoop/hbase/regionserver/RSStatusServlet.java
 0
@@ -5394,1736 +5394,1731 @@
 0
 4
 
-org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
-0
-0
-2
-
 org/apache/hadoop/hbase/security/SaslStatus.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/security/SaslUtil.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/SecurityInfo.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/SecurityUtil.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/Superusers.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/security/User.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/security/UserProvider.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/access/AccessControlClient.java
 0
 0
 48
-
+
 org/apache/hadoop/hbase/security/access/AccessControlConstants.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/access/AccessControlFilter.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/security/access/AccessControlLists.java
 0
 0
 18
-
+
 org/apache/hadoop/hbase/security/access/AccessControlUtil.java
 0
 0
 41
-
+
 org/apache/hadoop/hbase/security/access/AccessController.java
 0
 0
 34
-
+
 org/apache/hadoop/hbase/security/access/AuthResult.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/security/access/Permission.java
 0
 0
 7
-
+
 org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/security/access/ShadedAccessControlUtil.java
 0
 0
 48
-
+
 org/apache/hadoop/hbase/security/access/TableAuthManager.java
 0
 0
 41
-
+
 org/apache/hadoop/hbase/security/access/TablePermission.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/security/access/UserPermission.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
 0
 0
 6
-
+
 org/apache/hadoop/hbase/security/token/AuthenticationKey.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/token/AuthenticationTokenSecretManager.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/security/token/FsDelegationToken.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/token/TokenProvider.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/security/token/TokenUtil.java
 0
 0
 4
-
+
 org/apache/hadoop/hbase/security/token/ZKSecretWatcher.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/visibility/CellVisibility.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/security/visibility/DefaultVisibilityLabelServiceImpl.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/security/visibility/DefinedSetFilterScanLabelGenerator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/visibility/EnforcingScanLabelGenerator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/visibility/ExpressionParser.java
 0
 0
 8
-
+
 org/apache/hadoop/hbase/security/visibility/FeedUserAuthScanLabelGenerator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/visibility/ScanLabelGenerator.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/security/visibility/SimpleScanLabelGenerator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
 0
 0
 84
-
+
 org/apache/hadoop/hbase/security/visibility/VisibilityConstants.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/visibility/VisibilityController.java
 0
 0
 11
-
+
 org/apache/hadoop/hbase/security/visibility/VisibilityExpEvaluator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/visibility/VisibilityLabelFilter.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/visibility/VisibilityLabelServiceManager.java
 0
 0
 3
-
+
 org/apache/hadoop/hbase/security/visibility/VisibilityLabelsCache.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/security/visibility/VisibilityLabelsValidator.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/visibility/VisibilityReplicationEndpoint.java
 0
 0
 1
-
+
 org/apache/hadoop/hbase/security/visibility/VisibilityScanDeleteTracker.java
 0
 0
 49
-
+
 org/apache/hadoop/hbase/security/visibility/VisibilityUtils.java
 0
 0
 9
-
+
 org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.java
 0
 0
 2
-
+
 org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractMessage.java
 0
 0
 5
-
+
 org/apache/hadoop/hbase/shaded/com/google/protobuf/AbstractParser.java
 0
 0
 1
-
+
 

[26/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
index ac13492..ce4327f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
@@ -1101,284 +1101,280 @@
 1093
"hbase.regionserver.wal.enablecompression";
 1094
 1095  /** Configuration name of WAL storage 
policy
-1096   * Valid values are:
-1097   *  NONE: no preference in destination 
of block replicas
-1098   *  ONE_SSD: place only one block 
replica in SSD and the remaining in default storage
-1099   *  and ALL_SSD: place all block 
replicas on SSD
-1100   *
-1101   * See 
http://hadoop.apache.org/docs/r2.6.0/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html*/
-1102  public static final String 
WAL_STORAGE_POLICY = "hbase.wal.storage.policy";
-1103  public static final String 
DEFAULT_WAL_STORAGE_POLICY = "NONE";
+1096   * Valid values are: HOT, COLD, WARM, 
ALL_SSD, ONE_SSD, LAZY_PERSIST
+1097   * See 
http://hadoop.apache.org/docs/r2.7.3/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html*/
+1098  public static final String 
WAL_STORAGE_POLICY = "hbase.wal.storage.policy";
+1099  public static final String 
DEFAULT_WAL_STORAGE_POLICY = "HOT";
+1100
+1101  /** Region in Transition metrics 
threshold time */
+1102  public static final String 
METRICS_RIT_STUCK_WARNING_THRESHOLD =
+1103  
"hbase.metrics.rit.stuck.warning.threshold";
 1104
-1105  /** Region in Transition metrics 
threshold time */
-1106  public static final String 
METRICS_RIT_STUCK_WARNING_THRESHOLD =
-1107  
"hbase.metrics.rit.stuck.warning.threshold";
-1108
-1109  public static final String 
LOAD_BALANCER_SLOP_KEY = "hbase.regions.slop";
-1110
-  /** delimiter used between portions of 
a region name */
-1112  public static final int DELIMITER = 
',';
-1113
-1114  /**
-1115   * QOS attributes: these attributes 
are used to demarcate RPC call processing
-1116   * by different set of handlers. For 
example, HIGH_QOS tagged methods are
-1117   * handled by high priority 
handlers.
-1118   */
-1119  // normal_QOS  replication_QOS 
 replay_QOS  QOS_threshold  admin_QOS  high_QOS
-1120  public static final int NORMAL_QOS = 
0;
-1121  public static final int 
REPLICATION_QOS = 5;
-1122  public static final int REPLAY_QOS = 
6;
-1123  public static final int QOS_THRESHOLD 
= 10;
-1124  public static final int ADMIN_QOS = 
100;
-1125  public static final int HIGH_QOS = 
200;
-1126  public static final int 
SYSTEMTABLE_QOS = HIGH_QOS;
-1127
-1128  /** Directory under /hbase where 
archived hfiles are stored */
-1129  public static final String 
HFILE_ARCHIVE_DIRECTORY = "archive";
-1130
-1131  /**
-1132   * Name of the directory to store all 
snapshots. See SnapshotDescriptionUtils for
-1133   * remaining snapshot constants; this 
is here to keep HConstants dependencies at a minimum and
-1134   * uni-directional.
-1135   */
-1136  public static final String 
SNAPSHOT_DIR_NAME = ".hbase-snapshot";
-1137
-1138  /* Name of old snapshot directory. See 
HBASE-8352 for details on why it needs to be renamed */
-1139  public static final String 
OLD_SNAPSHOT_DIR_NAME = ".snapshot";
-1140
-1141  /** Temporary directory used for table 
creation and deletion */
-1142  public static final String 
HBASE_TEMP_DIRECTORY = ".tmp";
-1143  /**
-1144   * The period (in milliseconds) 
between computing region server point in time metrics
-1145   */
-1146  public static final String 
REGIONSERVER_METRICS_PERIOD = "hbase.regionserver.metrics.period";
-1147  public static final long 
DEFAULT_REGIONSERVER_METRICS_PERIOD = 5000;
-1148  /** Directories that are not HBase 
table directories */
-1149  public static final ListString 
HBASE_NON_TABLE_DIRS =
-1150
Collections.unmodifiableList(Arrays.asList(new String[] {
-1151  HBCK_SIDELINEDIR_NAME, 
HBASE_TEMP_DIRECTORY, MIGRATION_NAME
-1152}));
-1153
-1154  /** Directories that are not HBase 
user table directories */
-1155  public static final ListString 
HBASE_NON_USER_TABLE_DIRS =
-1156
Collections.unmodifiableList(Arrays.asList((String[])ArrayUtils.addAll(
-1157  new String[] { 
TableName.META_TABLE_NAME.getNameAsString() },
-1158  
HBASE_NON_TABLE_DIRS.toArray(;
-1159
-1160  /** Health script related settings. 
*/
-1161  public static final String 
HEALTH_SCRIPT_LOC = "hbase.node.health.script.location";
-1162  public static final String 
HEALTH_SCRIPT_TIMEOUT = "hbase.node.health.script.timeout";
-1163  public static final String 
HEALTH_CHORE_WAKE_FREQ =
-1164  
"hbase.node.health.script.frequency";
-1165  public static final long 

[04/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.html
index 8f41f08d..d6ee5b4 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.html
@@ -34,7 +34,7 @@
 026import 
org.apache.hadoop.hbase.procedure.SimpleMasterProcedureManager;
 027import 
org.apache.hadoop.hbase.procedure.SimpleRSProcedureManager;
 028import 
org.apache.hadoop.hbase.testclassification.ClientTests;
-029import 
org.apache.hadoop.hbase.testclassification.MediumTests;
+029import 
org.apache.hadoop.hbase.testclassification.LargeTests;
 030import 
org.apache.hadoop.hbase.util.Bytes;
 031import org.junit.Assert;
 032import org.junit.BeforeClass;
@@ -45,85 +45,86 @@
 037import java.util.Map;
 038import java.util.Random;
 039
-040import static 
org.junit.Assert.assertArrayEquals;
-041import static 
org.junit.Assert.assertFalse;
-042import static 
org.junit.Assert.assertTrue;
-043
-044/**
-045 * Class to test asynchronous procedure 
admin operations.
-046 */
-047@Category({ MediumTests.class, 
ClientTests.class })
-048public class TestAsyncProcedureAdminApi 
extends TestAsyncAdminBase {
-049
-050  @BeforeClass
-051  public static void setUpBeforeClass() 
throws Exception {
-052
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_PAUSE, 10);
-053
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 
3);
-054
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 1000);
-055
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 
3000);
-056
TEST_UTIL.getConfiguration().set(ProcedureManagerHost.MASTER_PROCEDURE_CONF_KEY,
-057  
SimpleMasterProcedureManager.class.getName());
-058
TEST_UTIL.getConfiguration().set(ProcedureManagerHost.REGIONSERVER_PROCEDURE_CONF_KEY,
-059  
SimpleRSProcedureManager.class.getName());
-060
TEST_UTIL.getConfiguration().setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, 
true);
-061TEST_UTIL.startMiniCluster(2);
-062ASYNC_CONN = 
ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();
-063  }
-064
-065  @Test
-066  public void testExecProcedure() throws 
Exception {
-067TableName tableName = 
TableName.valueOf("testExecProcedure");
-068try {
-069  Table table = 
TEST_UTIL.createTable(tableName, Bytes.toBytes("cf"));
-070  for (int i = 0; i  100; i++) 
{
-071Put put = new 
Put(Bytes.toBytes(i)).addColumn(Bytes.toBytes("cf"), null, Bytes.toBytes(i));
-072table.put(put);
-073  }
-074  // take a snapshot of the enabled 
table
-075  String snapshotString = 
"offlineTableSnapshot";
-076  MapString, String props = 
new HashMap();
-077  props.put("table", 
tableName.getNameAsString());
-078  
admin.execProcedure(SnapshotManager.ONLINE_SNAPSHOT_CONTROLLER_DESCRIPTION, 
snapshotString,
-079props).get();
-080  LOG.debug("Snapshot completed.");
-081} finally {
-082  TEST_UTIL.deleteTable(tableName);
-083}
-084  }
-085
-086  @Test
-087  public void testExecProcedureWithRet() 
throws Exception {
-088byte[] result = 
admin.execProcedureWithRet(SimpleMasterProcedureManager.SIMPLE_SIGNATURE,
-089  "myTest2", new 
HashMap()).get();
-090assertArrayEquals("Incorrect return 
data from execProcedure",
-091  
SimpleMasterProcedureManager.SIMPLE_DATA.getBytes(), result);
-092  }
-093
-094  @Test
-095  public void listProcedure() throws 
Exception {
-096ProcedureInfo[] procList = 
admin.listProcedures().get();
-097assertTrue(procList.length = 
0);
-098  }
-099
-100  @Test
-101  public void isProcedureFinished() 
throws Exception {
-102boolean failed = false;
-103try {
-104  
admin.isProcedureFinished("fake-signature", "fake-instance", new 
HashMap()).get();
-105} catch (Exception e) {
-106  failed = true;
-107}
-108Assert.assertTrue(failed);
-109  }
-110
-111  @Test
-112  public void abortProcedure() throws 
Exception {
-113Random randomGenerator = new 
Random();
-114long procId = 
randomGenerator.nextLong();
-115boolean abortResult = 
admin.abortProcedure(procId, true).get();
-116assertFalse(abortResult);
-117  }
-118}
+040import static 
org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
+041import static 
org.junit.Assert.assertArrayEquals;
+042import static 
org.junit.Assert.assertFalse;
+043import static 
org.junit.Assert.assertTrue;
+044
+045/**
+046 * Class to test asynchronous procedure 
admin operations.
+047 */
+048@Category({ LargeTests.class, 

[13/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.SaslDigestCallbackHandler.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.SaslDigestCallbackHandler.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.SaslDigestCallbackHandler.html
index 2aea531..4ff3ed5 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.SaslDigestCallbackHandler.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.SaslDigestCallbackHandler.html
@@ -6,7 +6,7 @@
 
 
 
-001/*
+001/**
 002 * Licensed to the Apache Software 
Foundation (ASF) under one
 003 * or more contributor license 
agreements.  See the NOTICE file
 004 * distributed with this work for 
additional information
@@ -23,169 +23,225 @@
 015 * See the License for the specific 
language governing permissions and
 016 * limitations under the License.
 017 */
-018
-019package 
org.apache.hadoop.hbase.security;
-020
-021import java.io.ByteArrayInputStream;
-022import java.io.DataInputStream;
-023import java.io.IOException;
-024import java.util.Locale;
-025import java.util.Map;
-026import java.util.function.Consumer;
-027
-028import 
javax.security.auth.callback.Callback;
-029import 
javax.security.auth.callback.CallbackHandler;
-030import 
javax.security.auth.callback.NameCallback;
-031import 
javax.security.auth.callback.PasswordCallback;
-032import 
javax.security.auth.callback.UnsupportedCallbackException;
-033import 
javax.security.sasl.AuthorizeCallback;
-034import 
javax.security.sasl.RealmCallback;
-035
-036import org.apache.commons.logging.Log;
-037import 
org.apache.commons.logging.LogFactory;
-038import 
org.apache.hadoop.conf.Configuration;
+018package 
org.apache.hadoop.hbase.security;
+019
+020import java.io.ByteArrayInputStream;
+021import java.io.DataInputStream;
+022import java.io.IOException;
+023import 
java.security.PrivilegedExceptionAction;
+024import java.util.Map;
+025
+026import 
javax.security.auth.callback.Callback;
+027import 
javax.security.auth.callback.CallbackHandler;
+028import 
javax.security.auth.callback.NameCallback;
+029import 
javax.security.auth.callback.PasswordCallback;
+030import 
javax.security.auth.callback.UnsupportedCallbackException;
+031import 
javax.security.sasl.AuthorizeCallback;
+032import 
javax.security.sasl.RealmCallback;
+033import javax.security.sasl.Sasl;
+034import 
javax.security.sasl.SaslException;
+035import javax.security.sasl.SaslServer;
+036
+037import org.apache.commons.logging.Log;
+038import 
org.apache.commons.logging.LogFactory;
 039import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-040import 
org.apache.hadoop.hbase.security.SaslUtil.QualityOfProtection;
-041import 
org.apache.hadoop.security.UserGroupInformation;
-042import 
org.apache.hadoop.security.token.SecretManager;
-043import 
org.apache.hadoop.security.token.SecretManager.InvalidToken;
-044import 
org.apache.hadoop.security.token.TokenIdentifier;
-045
-046/**
-047 * A utility class for dealing with SASL 
on RPC server
+040import 
org.apache.hadoop.security.UserGroupInformation;
+041import 
org.apache.hadoop.security.token.SecretManager;
+042import 
org.apache.hadoop.security.token.SecretManager.InvalidToken;
+043import 
org.apache.hadoop.security.token.TokenIdentifier;
+044
+045/**
+046 * A utility class that encapsulates SASL 
logic for RPC server. Copied from
+047 * 
codeorg.apache.hadoop.security/code
 048 */
 049@InterfaceAudience.Private
 050public class HBaseSaslRpcServer {
-051  private static final Log LOG = 
LogFactory.getLog(HBaseSaslRpcServer.class);
-052
-053  private static MapString, 
String saslProps = null;
-054
-055  public static void init(Configuration 
conf) {
-056saslProps = 
SaslUtil.initSaslProperties(conf.get("hbase.rpc.protection",
-057  
QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)));
-058  }
-059
-060  public static MapString, String 
getSaslProps() {
-061return saslProps;
-062  }
-063
-064  public static T extends 
TokenIdentifier T getIdentifier(String id,
-065  SecretManagerT 
secretManager) throws InvalidToken {
-066byte[] tokenId = 
SaslUtil.decodeIdentifier(id);
-067T tokenIdentifier = 
secretManager.createIdentifier();
-068try {
-069  tokenIdentifier.readFields(new 
DataInputStream(new ByteArrayInputStream(
-070  tokenId)));
-071} catch (IOException e) {
-072  throw (InvalidToken) new 
InvalidToken(
-073  "Can't de-serialize 
tokenIdentifier").initCause(e);
-074}
-075return tokenIdentifier;
-076  }
-077
-078
-079  /** CallbackHandler for SASL DIGEST-MD5 
mechanism */
-080  public static class 
SaslDigestCallbackHandler implements CallbackHandler {
-081private 
SecretManagerTokenIdentifier secretManager;
-082private 

[36/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 22b1b9b..07453ce 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
 2007 - 2017 The Apache Software Foundation
 
   File: 2203,
- Errors: 14460,
+ Errors: 14436,
  Warnings: 0,
  Infos: 0
   
@@ -9701,7 +9701,7 @@ under the License.
   0
 
 
-  42
+  22
 
   
   
@@ -23673,7 +23673,7 @@ under the License.
   0
 
 
-  124
+  122
 
   
   
@@ -26935,7 +26935,7 @@ under the License.
   0
 
 
-  2
+  0
 
   
   

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/coc.html
--
diff --git a/coc.html b/coc.html
index 561c6a7..70effb7 100644
--- a/coc.html
+++ b/coc.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Code of Conduct Policy
@@ -380,7 +380,7 @@ email to mailto:priv...@hbase.apache.org;>the priv
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-26
+  Last Published: 
2017-05-27
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/cygwin.html
--
diff --git a/cygwin.html b/cygwin.html
index 063a86b..59b24b0 100644
--- a/cygwin.html
+++ b/cygwin.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Installing Apache HBase (TM) on Windows using 
Cygwin
 
@@ -679,7 +679,7 @@ Now your HBase server is running, start 
coding and build that next
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-26
+  Last Published: 
2017-05-27
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/dependencies.html
--
diff --git a/dependencies.html b/dependencies.html
index 426a940..2c2a69a 100644
--- a/dependencies.html
+++ b/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Project Dependencies
 
@@ -524,7 +524,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-26
+  Last Published: 
2017-05-27
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/dependency-convergence.html
--
diff --git a/dependency-convergence.html b/dependency-convergence.html
index 3e18452..45cfc2d 100644
--- a/dependency-convergence.html
+++ b/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Reactor Dependency Convergence
 
@@ -1849,7 +1849,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-26
+  Last Published: 
2017-05-27
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/dependency-info.html
--
diff --git a/dependency-info.html b/dependency-info.html
index a5d6366..8205910 100644
--- a/dependency-info.html
+++ b/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Dependency Information
 
@@ -318,7 +318,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-26
+  Last Published: 
2017-05-27
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/dependency-management.html
--
diff --git a/dependency-management.html b/dependency-management.html
index 1eb172f..20d02eb 100644
--- a/dependency-management.html
+++ b/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+   

[06/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html
--
diff --git 
a/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html 
b/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html
index 6c856aa..b884e7e 100644
--- a/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html
+++ b/testdevapidocs/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public class TestAsyncTableAdminApi
+public class TestAsyncTableAdminApi
 extends TestAsyncAdminBase
 Class to test asynchronous table admin operations.
 
@@ -335,7 +335,7 @@ extends 
 
 name
-publicorg.junit.rules.TestName name
+publicorg.junit.rules.TestName name
 
 
 
@@ -352,7 +352,7 @@ extends 
 
 TestAsyncTableAdminApi
-publicTestAsyncTableAdminApi()
+publicTestAsyncTableAdminApi()
 
 
 
@@ -369,7 +369,7 @@ extends 
 
 testTableExist
-publicvoidtestTableExist()
+publicvoidtestTableExist()
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -383,7 +383,7 @@ extends 
 
 testListTables
-publicvoidtestListTables()
+publicvoidtestListTables()
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -397,7 +397,7 @@ extends 
 
 testGetTableDescriptor
-publicvoidtestGetTableDescriptor()
+publicvoidtestGetTableDescriptor()
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -411,7 +411,7 @@ extends 
 
 testCreateTable
-publicvoidtestCreateTable()
+publicvoidtestCreateTable()
  throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -425,7 +425,7 @@ extends 
 
 getStateFromMeta
-privateorg.apache.hadoop.hbase.client.TableState.StategetStateFromMeta(org.apache.hadoop.hbase.TableNametable)
+privateorg.apache.hadoop.hbase.client.TableState.StategetStateFromMeta(org.apache.hadoop.hbase.TableNametable)
   throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -439,7 +439,7 @@ extends 
 
 testCreateTableNumberOfRegions
-publicvoidtestCreateTableNumberOfRegions()
+publicvoidtestCreateTableNumberOfRegions()
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -453,7 +453,7 @@ extends 
 
 testCreateTableWithRegions
-publicvoidtestCreateTableWithRegions()
+publicvoidtestCreateTableWithRegions()
 throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -467,7 +467,7 @@ extends 
 
 verifyRoundRobinDistribution
-privatevoidverifyRoundRobinDistribution(org.apache.hadoop.hbase.client.ClusterConnectionc,
+privatevoidverifyRoundRobinDistribution(org.apache.hadoop.hbase.client.ClusterConnectionc,
   
org.apache.hadoop.hbase.client.RegionLocatorregionLocator,
   intexpectedRegions)
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
@@ -483,7 +483,7 @@ extends 
 
 testCreateTableWithOnlyEmptyStartRow
-publicvoidtestCreateTableWithOnlyEmptyStartRow()
+publicvoidtestCreateTableWithOnlyEmptyStartRow()
   throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -497,7 +497,7 @@ extends 
 
 testCreateTableWithEmptyRowInTheSplitKeys
-publicvoidtestCreateTableWithEmptyRowInTheSplitKeys()
+publicvoidtestCreateTableWithEmptyRowInTheSplitKeys()
throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 
 Throws:
@@ -511,7 +511,7 @@ extends 
 
 testDeleteTable
-publicvoidtestDeleteTable()
+publicvoidtestDeleteTable()
  throws http://docs.oracle.com/javase/8/docs/api/java/lang/Exception.html?is-external=true;
 title="class or interface in java.lang">Exception
 
 Throws:
@@ -525,7 +525,7 @@ extends 
 
 testDeleteTables

[19/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
index 7aeb0fb..f01cf3a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.RegionScannerCloseCallBack.html
@@ -27,3384 +27,3404 @@
 019package 
org.apache.hadoop.hbase.regionserver;
 020
 021import 
com.google.common.annotations.VisibleForTesting;
-022
-023import java.io.FileNotFoundException;
-024import java.io.IOException;
-025import java.io.InterruptedIOException;
-026import java.net.BindException;
-027import java.net.InetSocketAddress;
-028import java.net.UnknownHostException;
-029import java.nio.ByteBuffer;
-030import java.util.*;
-031import java.util.Map.Entry;
-032import 
java.util.concurrent.ConcurrentHashMap;
-033import 
java.util.concurrent.ConcurrentMap;
-034import 
java.util.concurrent.atomic.AtomicBoolean;
-035import 
java.util.concurrent.atomic.AtomicLong;
-036import 
java.util.concurrent.atomic.LongAdder;
-037
-038import 
org.apache.commons.lang.mutable.MutableObject;
-039import org.apache.commons.logging.Log;
-040import 
org.apache.commons.logging.LogFactory;
-041import 
org.apache.hadoop.conf.Configuration;
-042import org.apache.hadoop.fs.Path;
-043import 
org.apache.hadoop.hbase.ByteBufferCell;
-044import org.apache.hadoop.hbase.Cell;
-045import 
org.apache.hadoop.hbase.CellScannable;
-046import 
org.apache.hadoop.hbase.CellScanner;
-047import 
org.apache.hadoop.hbase.CellUtil;
-048import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-049import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-050import 
org.apache.hadoop.hbase.HBaseIOException;
-051import 
org.apache.hadoop.hbase.HConstants;
-052import 
org.apache.hadoop.hbase.HRegionInfo;
-053import 
org.apache.hadoop.hbase.HTableDescriptor;
-054import 
org.apache.hadoop.hbase.MultiActionResultTooLarge;
-055import 
org.apache.hadoop.hbase.NotServingRegionException;
-056import 
org.apache.hadoop.hbase.ServerName;
-057import 
org.apache.hadoop.hbase.TableName;
-058import 
org.apache.hadoop.hbase.UnknownScannerException;
-059import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-060import 
org.apache.hadoop.hbase.client.Append;
-061import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-062import 
org.apache.hadoop.hbase.client.Delete;
-063import 
org.apache.hadoop.hbase.client.Durability;
-064import 
org.apache.hadoop.hbase.client.Get;
-065import 
org.apache.hadoop.hbase.client.Increment;
-066import 
org.apache.hadoop.hbase.client.Mutation;
-067import 
org.apache.hadoop.hbase.client.Put;
-068import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-069import 
org.apache.hadoop.hbase.client.Result;
-070import 
org.apache.hadoop.hbase.client.RowMutations;
-071import 
org.apache.hadoop.hbase.client.Scan;
-072import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-073import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-074import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-075import 
org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-076import 
org.apache.hadoop.hbase.exceptions.ScannerResetException;
-077import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-078import 
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-079import 
org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
-080import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-081import 
org.apache.hadoop.hbase.ipc.PriorityFunction;
-082import 
org.apache.hadoop.hbase.ipc.QosPriority;
-083import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-084import 
org.apache.hadoop.hbase.ipc.RpcCallback;
-085import 
org.apache.hadoop.hbase.ipc.RpcServer;
-086import 
org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-087import 
org.apache.hadoop.hbase.ipc.RpcServerFactory;
-088import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-089import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-090import 
org.apache.hadoop.hbase.ipc.ServerRpcController;
-091import 
org.apache.hadoop.hbase.master.MasterRpcServices;
-092import 
org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;
-093import 
org.apache.hadoop.hbase.quotas.OperationQuota;
-094import 
org.apache.hadoop.hbase.quotas.QuotaUtil;
-095import 
org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
-096import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-097import 
org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;
-098import 
org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
-099import 

[10/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
index 9307cf8..58df534 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.html
@@ -70,14 +70,14 @@
 062@org.jamon.annotations.Argument(name 
= "master", type = "HMaster")},
 063  optionalArguments = {
 064@org.jamon.annotations.Argument(name 
= "format", type = "String"),
-065@org.jamon.annotations.Argument(name 
= "frags", type = "MapString,Integer"),
-066@org.jamon.annotations.Argument(name 
= "deadServers", type = "SetServerName"),
-067@org.jamon.annotations.Argument(name 
= "serverManager", type = "ServerManager"),
-068@org.jamon.annotations.Argument(name 
= "assignmentManager", type = "AssignmentManager"),
-069@org.jamon.annotations.Argument(name 
= "servers", type = "ListServerName"),
-070@org.jamon.annotations.Argument(name 
= "filter", type = "String"),
-071@org.jamon.annotations.Argument(name 
= "metaLocation", type = "ServerName"),
-072@org.jamon.annotations.Argument(name 
= "catalogJanitorEnabled", type = "boolean")})
+065@org.jamon.annotations.Argument(name 
= "serverManager", type = "ServerManager"),
+066@org.jamon.annotations.Argument(name 
= "filter", type = "String"),
+067@org.jamon.annotations.Argument(name 
= "assignmentManager", type = "AssignmentManager"),
+068@org.jamon.annotations.Argument(name 
= "frags", type = "MapString,Integer"),
+069@org.jamon.annotations.Argument(name 
= "deadServers", type = "SetServerName"),
+070@org.jamon.annotations.Argument(name 
= "metaLocation", type = "ServerName"),
+071@org.jamon.annotations.Argument(name 
= "catalogJanitorEnabled", type = "boolean"),
+072@org.jamon.annotations.Argument(name 
= "servers", type = "ListServerName")})
 073public class MasterStatusTmpl
 074  extends 
org.jamon.AbstractTemplateProxy
 075{
@@ -135,142 +135,142 @@
 127  return m_format__IsNotDefault;
 128}
 129private boolean 
m_format__IsNotDefault;
-130// 21, 1
-131public void 
setFrags(MapString,Integer frags)
+130// 28, 1
+131public void 
setServerManager(ServerManager serverManager)
 132{
-133  // 21, 1
-134  m_frags = frags;
-135  m_frags__IsNotDefault = true;
+133  // 28, 1
+134  m_serverManager = serverManager;
+135  m_serverManager__IsNotDefault = 
true;
 136}
-137public MapString,Integer 
getFrags()
+137public ServerManager 
getServerManager()
 138{
-139  return m_frags;
+139  return m_serverManager;
 140}
-141private MapString,Integer 
m_frags;
-142public boolean 
getFrags__IsNotDefault()
+141private ServerManager 
m_serverManager;
+142public boolean 
getServerManager__IsNotDefault()
 143{
-144  return m_frags__IsNotDefault;
+144  return 
m_serverManager__IsNotDefault;
 145}
-146private boolean 
m_frags__IsNotDefault;
-147// 24, 1
-148public void 
setDeadServers(SetServerName deadServers)
+146private boolean 
m_serverManager__IsNotDefault;
+147// 26, 1
+148public void setFilter(String 
filter)
 149{
-150  // 24, 1
-151  m_deadServers = deadServers;
-152  m_deadServers__IsNotDefault = 
true;
+150  // 26, 1
+151  m_filter = filter;
+152  m_filter__IsNotDefault = true;
 153}
-154public SetServerName 
getDeadServers()
+154public String getFilter()
 155{
-156  return m_deadServers;
+156  return m_filter;
 157}
-158private SetServerName 
m_deadServers;
-159public boolean 
getDeadServers__IsNotDefault()
+158private String m_filter;
+159public boolean 
getFilter__IsNotDefault()
 160{
-161  return 
m_deadServers__IsNotDefault;
+161  return m_filter__IsNotDefault;
 162}
-163private boolean 
m_deadServers__IsNotDefault;
-164// 28, 1
-165public void 
setServerManager(ServerManager serverManager)
+163private boolean 
m_filter__IsNotDefault;
+164// 29, 1
+165public void 
setAssignmentManager(AssignmentManager assignmentManager)
 166{
-167  // 28, 1
-168  m_serverManager = serverManager;
-169  m_serverManager__IsNotDefault = 
true;
+167  // 29, 1
+168  m_assignmentManager = 
assignmentManager;
+169  m_assignmentManager__IsNotDefault = 
true;
 170}
-171public ServerManager 
getServerManager()
+171public AssignmentManager 
getAssignmentManager()
 172{
-173  return m_serverManager;
+173  return m_assignmentManager;
 174}
-175private ServerManager 
m_serverManager;
-176public boolean 

[39/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/apidocs/org/apache/hadoop/hbase/HConstants.html
--
diff --git a/apidocs/org/apache/hadoop/hbase/HConstants.html 
b/apidocs/org/apache/hadoop/hbase/HConstants.html
index de97f75..edbf3f4 100644
--- a/apidocs/org/apache/hadoop/hbase/HConstants.html
+++ b/apidocs/org/apache/hadoop/hbase/HConstants.html
@@ -1862,12 +1862,8 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 static http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String
 WAL_STORAGE_POLICY
 Configuration name of WAL storage policy
- Valid values are:
-  NONE: no preference in destination of block replicas
-  ONE_SSD: place only one block replica in SSD and the remaining in default 
storage
-  and ALL_SSD: place all block replicas on SSD
-
- See 
http://hadoop.apache.org/docs/r2.6.0/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html
+ Valid values are: HOT, COLD, WARM, ALL_SSD, ONE_SSD, LAZY_PERSIST
+ See 
http://hadoop.apache.org/docs/r2.7.3/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html
 
 
 
@@ -5380,14 +5376,10 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 WAL_STORAGE_POLICY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String WAL_STORAGE_POLICY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String WAL_STORAGE_POLICY
 Configuration name of WAL storage policy
- Valid values are:
-  NONE: no preference in destination of block replicas
-  ONE_SSD: place only one block replica in SSD and the remaining in default 
storage
-  and ALL_SSD: place all block replicas on SSD
-
- See 
http://hadoop.apache.org/docs/r2.6.0/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html
+ Valid values are: HOT, COLD, WARM, ALL_SSD, ONE_SSD, LAZY_PERSIST
+ See 
http://hadoop.apache.org/docs/r2.7.3/hadoop-project-dist/hadoop-hdfs/ArchivalStorage.html
 
 See Also:
 Constant
 Field Values
@@ -5400,7 +5392,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 DEFAULT_WAL_STORAGE_POLICY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String DEFAULT_WAL_STORAGE_POLICY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String DEFAULT_WAL_STORAGE_POLICY
 
 See Also:
 Constant
 Field Values
@@ -5413,7 +5405,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 METRICS_RIT_STUCK_WARNING_THRESHOLD
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String METRICS_RIT_STUCK_WARNING_THRESHOLD
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String METRICS_RIT_STUCK_WARNING_THRESHOLD
 Region in Transition metrics threshold time
 
 See Also:
@@ -5427,7 +5419,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 LOAD_BALANCER_SLOP_KEY
-public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String LOAD_BALANCER_SLOP_KEY
+public static finalhttp://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String LOAD_BALANCER_SLOP_KEY
 
 See Also:
 Constant
 Field Values
@@ -5440,7 +5432,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 DELIMITER
-public static finalint DELIMITER
+public static finalint DELIMITER
 delimiter used between portions of a region name
 
 See Also:
@@ -5454,7 +5446,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 NORMAL_QOS
-public static finalint NORMAL_QOS
+public static finalint NORMAL_QOS
 QOS attributes: these attributes are used to demarcate RPC 
call processing
  by different set of handlers. For example, HIGH_QOS tagged methods are
  handled by high priority handlers.
@@ -5470,7 +5462,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 REPLICATION_QOS
-public static finalint REPLICATION_QOS
+public static finalint REPLICATION_QOS
 
 See Also:
 Constant
 Field Values
@@ -5483,7 +5475,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 REPLAY_QOS
-public static finalint REPLAY_QOS
+public static finalint REPLAY_QOS
 
 See Also:
 Constant
 Field Values
@@ -5496,7 +5488,7 @@ public static finalhttp://docs.oracle.com/javase/8/docs/api/java/
 
 
 QOS_THRESHOLD
-public static finalint QOS_THRESHOLD
+public static finalint QOS_THRESHOLD
 
 See 

[07/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/sponsors.html
--
diff --git a/sponsors.html b/sponsors.html
index 071b242..d718b89 100644
--- a/sponsors.html
+++ b/sponsors.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Apache HBase™ Sponsors
 
@@ -338,7 +338,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-26
+  Last Published: 
2017-05-27
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/supportingprojects.html
--
diff --git a/supportingprojects.html b/supportingprojects.html
index da85fb1..f855ba9 100644
--- a/supportingprojects.html
+++ b/supportingprojects.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Supporting Projects
 
@@ -525,7 +525,7 @@ under the License. -->
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-26
+  Last Published: 
2017-05-27
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/team-list.html
--
diff --git a/team-list.html b/team-list.html
index 4f0242d..5d1b0a1 100644
--- a/team-list.html
+++ b/team-list.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Team list
 
@@ -799,7 +799,7 @@ window.onLoad = init();
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2017-05-26
+  Last Published: 
2017-05-27
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/testdevapidocs/allclasses-frame.html
--
diff --git a/testdevapidocs/allclasses-frame.html 
b/testdevapidocs/allclasses-frame.html
index 4fe42e4..5622f56 100644
--- a/testdevapidocs/allclasses-frame.html
+++ b/testdevapidocs/allclasses-frame.html
@@ -1714,6 +1714,7 @@
 TestScanRowPrefix
 TestScanWildcardColumnTracker
 TestScanWithBloomError
+TestScanWithoutFetchingData
 TestSchemaResource
 TestSCVFWithMiniCluster
 TestSecureAsyncWALReplay

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/testdevapidocs/allclasses-noframe.html
--
diff --git a/testdevapidocs/allclasses-noframe.html 
b/testdevapidocs/allclasses-noframe.html
index 68a9fa7..508e65e 100644
--- a/testdevapidocs/allclasses-noframe.html
+++ b/testdevapidocs/allclasses-noframe.html
@@ -1714,6 +1714,7 @@
 TestScanRowPrefix
 TestScanWildcardColumnTracker
 TestScanWithBloomError
+TestScanWithoutFetchingData
 TestSchemaResource
 TestSCVFWithMiniCluster
 TestSecureAsyncWALReplay

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/testdevapidocs/constant-values.html
--
diff --git a/testdevapidocs/constant-values.html 
b/testdevapidocs/constant-values.html
index 0e504db..b490876 100644
--- a/testdevapidocs/constant-values.html
+++ b/testdevapidocs/constant-values.html
@@ -3489,6 +3489,25 @@
 
 
 
+org.apache.hadoop.hbase.client.TestScanWithoutFetchingData
+
+Modifier and Type
+Constant Field
+Value
+
+
+
+
+
+privatestaticfinalint
+COUNT
+10
+
+
+
+
+
+
 org.apache.hadoop.hbase.client.TestSizeFailures
 
 Modifier and Type

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/testdevapidocs/index-all.html
--
diff --git a/testdevapidocs/index-all.html b/testdevapidocs/index-all.html
index 68ad6b3..2619430 100644
--- a/testdevapidocs/index-all.html
+++ b/testdevapidocs/index-all.html
@@ -1630,6 +1630,8 @@
 
 assertResult(TableName,
 int) - Method in class org.apache.hadoop.hbase.client.TestAsyncSnapshotAdminApi
 
+assertResult(int,
 Result) - Method in class org.apache.hadoop.hbase.client.TestScanWithoutFetchingData
+
 assertResultEquals(Result,
 int) - Method in class org.apache.hadoop.hbase.client.AbstractTestAsyncTableScan
 
 assertResultEquals(Result,
 int, int, int) - Method in class org.apache.hadoop.hbase.client.TestBatchScanResultCache
@@ -3001,6 +3003,8 @@
 
 CF
 - Static variable in class org.apache.hadoop.hbase.client.TestMvccConsistentScanner
 
+CF
 - Static variable in class org.apache.hadoop.hbase.client.TestScanWithoutFetchingData
+
 cf
 - Static variable in class 
org.apache.hadoop.hbase.codec.prefixtree.row.data.TestRowDataComplexQualifiers
 
 

[02/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html
--
diff --git 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html
 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html
index 090b6b8..7757a67 100644
--- 
a/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html
+++ 
b/testdevapidocs/src-html/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.html
@@ -25,857 +25,854 @@
 017 */
 018package org.apache.hadoop.hbase.client;
 019
-020import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-021import static 
org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
-022import static 
org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-023import static 
org.junit.Assert.assertEquals;
-024import static 
org.junit.Assert.assertFalse;
-025import static 
org.junit.Assert.assertTrue;
-026import static org.junit.Assert.fail;
-027
-028import java.io.IOException;
-029import java.util.ArrayList;
-030import java.util.Arrays;
-031import java.util.HashMap;
-032import java.util.Iterator;
-033import java.util.List;
-034import java.util.Map;
-035import java.util.Optional;
-036import java.util.Set;
-037import 
java.util.concurrent.CompletionException;
-038import java.util.regex.Pattern;
-039
-040import org.apache.hadoop.fs.Path;
-041import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-042import 
org.apache.hadoop.hbase.HColumnDescriptor;
-043import 
org.apache.hadoop.hbase.HConstants;
-044import 
org.apache.hadoop.hbase.HRegionInfo;
-045import 
org.apache.hadoop.hbase.HRegionLocation;
-046import 
org.apache.hadoop.hbase.HTableDescriptor;
-047import 
org.apache.hadoop.hbase.ServerName;
-048import 
org.apache.hadoop.hbase.TableName;
-049import 
org.apache.hadoop.hbase.TableNotEnabledException;
-050import 
org.apache.hadoop.hbase.master.MasterFileSystem;
-051import 
org.apache.hadoop.hbase.testclassification.ClientTests;
-052import 
org.apache.hadoop.hbase.testclassification.LargeTests;
-053import 
org.apache.hadoop.hbase.util.Bytes;
-054import 
org.apache.hadoop.hbase.util.FSTableDescriptors;
-055import 
org.apache.hadoop.hbase.util.FSUtils;
-056import org.junit.Assert;
-057import org.junit.Rule;
-058import org.junit.Test;
-059import 
org.junit.experimental.categories.Category;
-060import org.junit.rules.TestName;
-061
-062/**
-063 * Class to test asynchronous table admin 
operations.
-064 */
-065@Category({LargeTests.class, 
ClientTests.class})
-066public class TestAsyncTableAdminApi 
extends TestAsyncAdminBase {
+020import static 
org.junit.Assert.assertEquals;
+021import static 
org.junit.Assert.assertFalse;
+022import static 
org.junit.Assert.assertTrue;
+023import static org.junit.Assert.fail;
+024
+025import java.io.IOException;
+026import java.util.ArrayList;
+027import java.util.Arrays;
+028import java.util.HashMap;
+029import java.util.Iterator;
+030import java.util.List;
+031import java.util.Map;
+032import java.util.Optional;
+033import java.util.Set;
+034import 
java.util.concurrent.CompletionException;
+035import java.util.regex.Pattern;
+036
+037import org.apache.hadoop.fs.Path;
+038import 
org.apache.hadoop.hbase.AsyncMetaTableAccessor;
+039import 
org.apache.hadoop.hbase.HColumnDescriptor;
+040import 
org.apache.hadoop.hbase.HConstants;
+041import 
org.apache.hadoop.hbase.HRegionInfo;
+042import 
org.apache.hadoop.hbase.HRegionLocation;
+043import 
org.apache.hadoop.hbase.HTableDescriptor;
+044import 
org.apache.hadoop.hbase.ServerName;
+045import 
org.apache.hadoop.hbase.TableName;
+046import 
org.apache.hadoop.hbase.TableNotEnabledException;
+047import 
org.apache.hadoop.hbase.master.MasterFileSystem;
+048import 
org.apache.hadoop.hbase.testclassification.ClientTests;
+049import 
org.apache.hadoop.hbase.testclassification.LargeTests;
+050import 
org.apache.hadoop.hbase.util.Bytes;
+051import 
org.apache.hadoop.hbase.util.FSTableDescriptors;
+052import 
org.apache.hadoop.hbase.util.FSUtils;
+053import org.junit.Assert;
+054import org.junit.Rule;
+055import org.junit.Test;
+056import 
org.junit.experimental.categories.Category;
+057import org.junit.rules.TestName;
+058
+059/**
+060 * Class to test asynchronous table admin 
operations.
+061 */
+062@Category({LargeTests.class, 
ClientTests.class})
+063public class TestAsyncTableAdminApi 
extends TestAsyncAdminBase {
+064
+065  @Rule
+066  public TestName name = new 
TestName();
 067
-068  @Rule
-069  public TestName name = new 
TestName();
-070
-071  @Test
-072  public void testTableExist() throws 
Exception {
-073final TableName tableName = 
TableName.valueOf(name.getMethodName());
-074boolean exist;
+068  @Test
+069  public void testTableExist() throws 
Exception {
+070final TableName tableName = 
TableName.valueOf(name.getMethodName());
+071boolean exist;

[08/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
--
diff --git 
a/hbase-archetypes/hbase-shaded-client-project/dependency-management.html 
b/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
index f855604..ada40f3 100644
--- a/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
+++ b/hbase-archetypes/hbase-shaded-client-project/dependency-management.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-05-26
+Last Published: 2017-05-27
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/hbase-archetypes/hbase-shaded-client-project/index.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/index.html 
b/hbase-archetypes/hbase-shaded-client-project/index.html
index 24df095..356ec3e 100644
--- a/hbase-archetypes/hbase-shaded-client-project/index.html
+++ b/hbase-archetypes/hbase-shaded-client-project/index.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-05-26
+Last Published: 2017-05-27
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/hbase-archetypes/hbase-shaded-client-project/integration.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/integration.html 
b/hbase-archetypes/hbase-shaded-client-project/integration.html
index 7056414..bb61a70 100644
--- a/hbase-archetypes/hbase-shaded-client-project/integration.html
+++ b/hbase-archetypes/hbase-shaded-client-project/integration.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-05-26
+Last Published: 2017-05-27
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/hbase-archetypes/hbase-shaded-client-project/issue-tracking.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/issue-tracking.html 
b/hbase-archetypes/hbase-shaded-client-project/issue-tracking.html
index 5a5e966..3a3ba4c 100644
--- a/hbase-archetypes/hbase-shaded-client-project/issue-tracking.html
+++ b/hbase-archetypes/hbase-shaded-client-project/issue-tracking.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 
 
@@ -27,7 +27,7 @@
 
 
 
-Last Published: 2017-05-26
+Last Published: 2017-05-27
   | Version: 
2.0.0-SNAPSHOT
   
 Apache HBase - Exemplar for 
hbase-shaded-client archetype

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/hbase-archetypes/hbase-shaded-client-project/license.html
--
diff --git a/hbase-archetypes/hbase-shaded-client-project/license.html 
b/hbase-archetypes/hbase-shaded-client-project/license.html
index dd35be7..5c79170 100644
--- a/hbase-archetypes/hbase-shaded-client-project/license.html
+++ b/hbase-archetypes/hbase-shaded-client-project/license.html
@@ -1,5 +1,5 @@
 http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd;>
-
+
 http://www.w3.org/1999/xhtml; xml:lang="en" lang="en">
   
 
@@ -10,7 +10,7 @@
   @import url("./css/site.css");
 
 
-
+
 
 

[15/40] hbase-site git commit: Published site at 82d554e3783372cc6b05489452c815b57c06f6cd.

2017-05-27 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/f8f0a032/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
index 7aeb0fb..f01cf3a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/regionserver/RSRpcServices.ScannerListener.html
@@ -27,3384 +27,3404 @@
 019package 
org.apache.hadoop.hbase.regionserver;
 020
 021import 
com.google.common.annotations.VisibleForTesting;
-022
-023import java.io.FileNotFoundException;
-024import java.io.IOException;
-025import java.io.InterruptedIOException;
-026import java.net.BindException;
-027import java.net.InetSocketAddress;
-028import java.net.UnknownHostException;
-029import java.nio.ByteBuffer;
-030import java.util.*;
-031import java.util.Map.Entry;
-032import 
java.util.concurrent.ConcurrentHashMap;
-033import 
java.util.concurrent.ConcurrentMap;
-034import 
java.util.concurrent.atomic.AtomicBoolean;
-035import 
java.util.concurrent.atomic.AtomicLong;
-036import 
java.util.concurrent.atomic.LongAdder;
-037
-038import 
org.apache.commons.lang.mutable.MutableObject;
-039import org.apache.commons.logging.Log;
-040import 
org.apache.commons.logging.LogFactory;
-041import 
org.apache.hadoop.conf.Configuration;
-042import org.apache.hadoop.fs.Path;
-043import 
org.apache.hadoop.hbase.ByteBufferCell;
-044import org.apache.hadoop.hbase.Cell;
-045import 
org.apache.hadoop.hbase.CellScannable;
-046import 
org.apache.hadoop.hbase.CellScanner;
-047import 
org.apache.hadoop.hbase.CellUtil;
-048import 
org.apache.hadoop.hbase.DoNotRetryIOException;
-049import 
org.apache.hadoop.hbase.DroppedSnapshotException;
-050import 
org.apache.hadoop.hbase.HBaseIOException;
-051import 
org.apache.hadoop.hbase.HConstants;
-052import 
org.apache.hadoop.hbase.HRegionInfo;
-053import 
org.apache.hadoop.hbase.HTableDescriptor;
-054import 
org.apache.hadoop.hbase.MultiActionResultTooLarge;
-055import 
org.apache.hadoop.hbase.NotServingRegionException;
-056import 
org.apache.hadoop.hbase.ServerName;
-057import 
org.apache.hadoop.hbase.TableName;
-058import 
org.apache.hadoop.hbase.UnknownScannerException;
-059import 
org.apache.hadoop.hbase.classification.InterfaceAudience;
-060import 
org.apache.hadoop.hbase.client.Append;
-061import 
org.apache.hadoop.hbase.client.ConnectionUtils;
-062import 
org.apache.hadoop.hbase.client.Delete;
-063import 
org.apache.hadoop.hbase.client.Durability;
-064import 
org.apache.hadoop.hbase.client.Get;
-065import 
org.apache.hadoop.hbase.client.Increment;
-066import 
org.apache.hadoop.hbase.client.Mutation;
-067import 
org.apache.hadoop.hbase.client.Put;
-068import 
org.apache.hadoop.hbase.client.RegionReplicaUtil;
-069import 
org.apache.hadoop.hbase.client.Result;
-070import 
org.apache.hadoop.hbase.client.RowMutations;
-071import 
org.apache.hadoop.hbase.client.Scan;
-072import 
org.apache.hadoop.hbase.client.VersionInfoUtil;
-073import 
org.apache.hadoop.hbase.conf.ConfigurationObserver;
-074import 
org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
-075import 
org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
-076import 
org.apache.hadoop.hbase.exceptions.ScannerResetException;
-077import 
org.apache.hadoop.hbase.filter.ByteArrayComparable;
-078import 
org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-079import 
org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
-080import 
org.apache.hadoop.hbase.ipc.HBaseRpcController;
-081import 
org.apache.hadoop.hbase.ipc.PriorityFunction;
-082import 
org.apache.hadoop.hbase.ipc.QosPriority;
-083import 
org.apache.hadoop.hbase.ipc.RpcCallContext;
-084import 
org.apache.hadoop.hbase.ipc.RpcCallback;
-085import 
org.apache.hadoop.hbase.ipc.RpcServer;
-086import 
org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-087import 
org.apache.hadoop.hbase.ipc.RpcServerFactory;
-088import 
org.apache.hadoop.hbase.ipc.RpcServerInterface;
-089import 
org.apache.hadoop.hbase.ipc.ServerNotRunningYetException;
-090import 
org.apache.hadoop.hbase.ipc.ServerRpcController;
-091import 
org.apache.hadoop.hbase.master.MasterRpcServices;
-092import 
org.apache.hadoop.hbase.quotas.ActivePolicyEnforcement;
-093import 
org.apache.hadoop.hbase.quotas.OperationQuota;
-094import 
org.apache.hadoop.hbase.quotas.QuotaUtil;
-095import 
org.apache.hadoop.hbase.quotas.RegionServerRpcQuotaManager;
-096import 
org.apache.hadoop.hbase.quotas.RegionServerSpaceQuotaManager;
-097import 
org.apache.hadoop.hbase.quotas.SpaceQuotaSnapshot;
-098import 
org.apache.hadoop.hbase.quotas.SpaceViolationPolicyEnforcement;
-099import 
org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;

hbase git commit: HBASE-18042 Client Compatibility breaks between versions 1.2 and 1.3

2017-05-27 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 8cbb0411b -> 2277c2b63


HBASE-18042 Client Compatibility breaks between versions 1.2 and 1.3


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2277c2b6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2277c2b6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2277c2b6

Branch: refs/heads/branch-1.3
Commit: 2277c2b63680df2af9edb3c534f0359e0ea14b5d
Parents: 8cbb041
Author: zhangduo 
Authored: Fri May 26 11:43:47 2017 +0800
Committer: zhangduo 
Committed: Sat May 27 17:56:17 2017 +0800

--
 .../hbase/regionserver/RSRpcServices.java   |  42 +++---
 .../hbase/client/TestAlwaysSetScannerId.java|   5 +-
 .../hadoop/hbase/client/TestLeaseRenewal.java   |   3 +-
 .../client/TestScanWithoutFetchingData.java | 132 +++
 4 files changed, 161 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2277c2b6/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index d3d64c8..d37a287 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
 import com.google.protobuf.ByteString;
 import com.google.protobuf.Message;
 import com.google.protobuf.RpcController;
@@ -43,6 +45,7 @@ import java.util.Set;
 import java.util.TreeSet;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.lang.mutable.MutableObject;
@@ -249,7 +252,10 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
 
   private final AtomicLong scannerIdGen = new AtomicLong(0L);
   private final ConcurrentMap scanners = new 
ConcurrentHashMap<>();
-
+  // Hold the name of a closed scanner for a while. This is used to keep 
compatible for old clients
+  // which may send next or close request to a region scanner which has 
already been exhausted. The
+  // entries will be removed automatically after scannerLeaseTimeoutPeriod.
+  private final Cache closedScanners;
   /**
* The lease timeout period for client scanners (milliseconds).
*/
@@ -1024,6 +1030,9 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
 isa = new InetSocketAddress(initialIsa.getHostName(), address.getPort());
 rpcServer.setErrorHandler(this);
 rs.setName(name);
+
+closedScanners = CacheBuilder.newBuilder()
+.expireAfterAccess(scannerLeaseTimeoutPeriod, 
TimeUnit.MILLISECONDS).build();
   }
 
   @Override
@@ -2430,18 +2439,18 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
 String scannerName = Long.toString(request.getScannerId());
 RegionScannerHolder rsh = scanners.get(scannerName);
 if (rsh == null) {
-  // just ignore the close request if scanner does not exists.
-  if (request.hasCloseScanner() && request.getCloseScanner()) {
+  // just ignore the next or close request if scanner does not exists.
+  if (closedScanners.getIfPresent(scannerName) != null) {
 throw SCANNER_ALREADY_CLOSED;
   } else {
 LOG.warn("Client tried to access missing scanner " + scannerName);
 throw new UnknownScannerException(
-"Unknown scanner '" + scannerName + "'. This can happen due to any 
of the following "
-+ "reasons: a) Scanner id given is wrong, b) Scanner lease 
expired because of "
-+ "long wait between consecutive client checkins, c) Server 
may be closing down, "
-+ "d) RegionServer restart during upgrade.\nIf the issue is 
due to reason (b), a "
-+ "possible fix would be increasing the value of"
-+ "'hbase.client.scanner.timeout.period' configuration.");
+"Unknown scanner '" + scannerName + "'. This can happen due to any 
of the following " +
+"reasons: a) Scanner id given is wrong, b) Scanner lease 
expired because of " +
+"long wait between consecutive client checkins, c) Server may 
be closing down, " +
+"d) RegionServer restart during upgrade.\nIf 

hbase git commit: HBASE-18042 Client Compatibility breaks between versions 1.2 and 1.3

2017-05-27 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master efc7edc81 -> 6846b0394


HBASE-18042 Client Compatibility breaks between versions 1.2 and 1.3


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6846b039
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6846b039
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6846b039

Branch: refs/heads/master
Commit: 6846b03944d7e72301b825d4d118732c0ca65577
Parents: efc7edc
Author: zhangduo 
Authored: Thu May 25 11:02:09 2017 +0800
Committer: zhangduo 
Committed: Sat May 27 17:55:49 2017 +0800

--
 .../hbase/regionserver/RSRpcServices.java   |  58 +---
 .../hbase/client/TestAlwaysSetScannerId.java|   5 +-
 .../hadoop/hbase/client/TestLeaseRenewal.java   |   3 +-
 .../client/TestScanWithoutFetchingData.java | 131 +++
 4 files changed, 175 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6846b039/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index b3ca94d..1f3fede 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -27,10 +29,20 @@ import java.net.BindException;
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
 import java.nio.ByteBuffer;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import java.util.Map.Entry;
+import java.util.NavigableMap;
+import java.util.Set;
+import java.util.TreeSet;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.LongAdder;
@@ -194,7 +206,6 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.MapReduceProtos.ScanMet
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsRequest;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.GetSpaceQuotaSnapshotsResponse.TableQuotaSnapshot;
-import 
org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceViolationPolicy;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos.RequestHeader;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescriptor;
 import 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
@@ -265,7 +276,10 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
 
   private final AtomicLong scannerIdGen = new AtomicLong(0L);
   private final ConcurrentMap scanners = new 
ConcurrentHashMap<>();
-
+  // Hold the name of a closed scanner for a while. This is used to keep 
compatible for old clients
+  // which may send next or close request to a region scanner which has 
already been exhausted. The
+  // entries will be removed automatically after scannerLeaseTimeoutPeriod.
+  private final Cache closedScanners;
   /**
* The lease timeout period for client scanners (milliseconds).
*/
@@ -1168,6 +1182,9 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
 isa = new InetSocketAddress(initialIsa.getHostName(), address.getPort());
 rpcServer.setErrorHandler(this);
 rs.setName(name);
+
+closedScanners = CacheBuilder.newBuilder()
+.expireAfterAccess(scannerLeaseTimeoutPeriod, 
TimeUnit.MILLISECONDS).build();
   }
 
   @Override
@@ -2790,18 +2807,18 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
 String scannerName = Long.toString(request.getScannerId());
 RegionScannerHolder rsh = scanners.get(scannerName);
 if (rsh == null) {
-  // just ignore the close request if scanner does not exists.
-  if (request.hasCloseScanner() && request.getCloseScanner()) {
+  // just ignore the next or close 

hbase git commit: HBASE-18042 Client Compatibility breaks between versions 1.2 and 1.3

2017-05-27 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-1 d8c1e0e00 -> 1a37f3be8


HBASE-18042 Client Compatibility breaks between versions 1.2 and 1.3


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1a37f3be
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1a37f3be
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1a37f3be

Branch: refs/heads/branch-1
Commit: 1a37f3be82f3d4e111ff846a79583472da86da4d
Parents: d8c1e0e
Author: zhangduo 
Authored: Thu May 25 11:02:09 2017 +0800
Committer: zhangduo 
Committed: Sat May 27 17:56:05 2017 +0800

--
 .../hbase/regionserver/RSRpcServices.java   |  48 ---
 .../hbase/client/TestAlwaysSetScannerId.java|   5 +-
 .../hadoop/hbase/client/TestLeaseRenewal.java   |   3 +-
 .../client/TestScanWithoutFetchingData.java | 133 +++
 4 files changed, 167 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1a37f3be/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 3f23d2b..69b8ac1 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
 import com.google.protobuf.ByteString;
 import com.google.protobuf.Message;
 import com.google.protobuf.RpcController;
@@ -44,6 +46,7 @@ import java.util.Set;
 import java.util.TreeSet;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.lang.mutable.MutableObject;
@@ -197,8 +200,6 @@ import org.apache.hadoop.hbase.wal.WALSplitter;
 import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
 import org.apache.zookeeper.KeeperException;
 
-import com.google.common.annotations.VisibleForTesting;
-
 /**
  * Implements the regionserver RPC services.
  */
@@ -252,7 +253,10 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
 
   private final AtomicLong scannerIdGen = new AtomicLong(0L);
   private final ConcurrentMap scanners = new 
ConcurrentHashMap<>();
-
+  // Hold the name of a closed scanner for a while. This is used to keep 
compatible for old clients
+  // which may send next or close request to a region scanner which has 
already been exhausted. The
+  // entries will be removed automatically after scannerLeaseTimeoutPeriod.
+  private final Cache closedScanners;
   /**
* The lease timeout period for client scanners (milliseconds).
*/
@@ -1072,6 +1076,9 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
 isa = new InetSocketAddress(initialIsa.getHostName(), address.getPort());
 rpcServer.setErrorHandler(this);
 rs.setName(name);
+
+closedScanners = CacheBuilder.newBuilder()
+.expireAfterAccess(scannerLeaseTimeoutPeriod, 
TimeUnit.MILLISECONDS).build();
   }
 
   @Override
@@ -2492,18 +2499,18 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
 String scannerName = Long.toString(request.getScannerId());
 RegionScannerHolder rsh = scanners.get(scannerName);
 if (rsh == null) {
-  // just ignore the close request if scanner does not exists.
-  if (request.hasCloseScanner() && request.getCloseScanner()) {
+  // just ignore the next or close request if scanner does not exists.
+  if (closedScanners.getIfPresent(scannerName) != null) {
 throw SCANNER_ALREADY_CLOSED;
   } else {
 LOG.warn("Client tried to access missing scanner " + scannerName);
 throw new UnknownScannerException(
-"Unknown scanner '" + scannerName + "'. This can happen due to any 
of the following "
-+ "reasons: a) Scanner id given is wrong, b) Scanner lease 
expired because of "
-+ "long wait between consecutive client checkins, c) Server 
may be closing down, "
-+ "d) RegionServer restart during upgrade.\nIf the issue is 
due to reason (b), a "
-+ "possible fix would be increasing the value of"
-+ "'hbase.client.scanner.timeout.period' configuration.");
+"Unknown scanner '" + scannerName + "'. This can happen due to any