hbase git commit: HBASE-19879 Promote TestAcidGuaranteesXXX to LargeTests

2018-01-28 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/branch-2 380169eaf -> 5ef5c5a24


HBASE-19879 Promote TestAcidGuaranteesXXX to LargeTests


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5ef5c5a2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5ef5c5a2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5ef5c5a2

Branch: refs/heads/branch-2
Commit: 5ef5c5a2478bdff8a045b14f0154f0436c69aa81
Parents: 380169e
Author: zhangduo 
Authored: Mon Jan 29 13:04:58 2018 +0800
Committer: zhangduo 
Committed: Mon Jan 29 14:57:31 2018 +0800

--
 .../hadoop/hbase/AcidGuaranteesTestBase.java| 134 +++
 .../TestAcidGuaranteesWithAdaptivePolicy.java   |   6 +-
 .../TestAcidGuaranteesWithBasicPolicy.java  |   6 +-
 .../TestAcidGuaranteesWithEagerPolicy.java  |   7 +-
 ...TestAcidGuaranteesWithNoInMemCompaction.java | 122 +
 5 files changed, 147 insertions(+), 128 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5ef5c5a2/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestBase.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestBase.java
new file mode 100644
index 000..c0aa1a0
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestBase.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.apache.hadoop.hbase.AcidGuaranteesTestTool.FAMILIES;
+import static org.apache.hadoop.hbase.AcidGuaranteesTestTool.TABLE_NAME;
+
+import java.util.List;
+import java.util.stream.Stream;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
+import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
+import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+
+/**
+ * Test case that uses multiple threads to read and write multifamily rows into a table, verifying
+ * that reads never see partially-complete writes. This can run as a junit test, or with a main()
+ * function which runs against a real cluster (eg for testing with failures, region movement, etc)
+ */
+public abstract class AcidGuaranteesTestBase {
+
+  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+
+  private AcidGuaranteesTestTool tool = new AcidGuaranteesTestTool();
+
+  protected abstract MemoryCompactionPolicy getMemoryCompactionPolicy();
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    // Set small flush size for minicluster so we exercise reseeking scanners
+    Configuration conf = UTIL.getConfiguration();
+    conf.set(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, String.valueOf(128 * 1024));
+    // prevent aggressive region split
+    conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
+      ConstantSizeRegionSplitPolicy.class.getName());
+    conf.setInt("hfile.format.version", 3); // for mob tests
+    UTIL.startMiniCluster(1);
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    UTIL.shutdownMiniCluster();
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    MemoryCompactionPolicy policy = getMemoryCompactionPolicy();
+    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TABLE_NAME)
+        .setValue(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY, policy.name());
+    if (policy == MemoryCompactionPolicy.EAGER) {

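The archived message is cut off inside the EAGER branch. A sketch of how setUp() plausibly completes, inferred only from the imports shown above (MemStoreLAB, Stream, ColumnFamilyDescriptorBuilder) and not from the committed source:

  @Before
  public void setUp() throws Exception {
    MemoryCompactionPolicy policy = getMemoryCompactionPolicy();
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TABLE_NAME)
        .setValue(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY, policy.name());
    if (policy == MemoryCompactionPolicy.EAGER) {
      // Assumption: the eager-compaction variant also turns MSLAB off.
      builder.setValue(MemStoreLAB.USEMSLAB_KEY, "false");
    }
    // One column family per family name defined by AcidGuaranteesTestTool.
    Stream.of(FAMILIES).map(ColumnFamilyDescriptorBuilder::of)
        .forEach(builder::setColumnFamily);
    UTIL.getAdmin().createTable(builder.build());
  }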
hbase git commit: HBASE-19879 Promote TestAcidGuaranteesXXX to LargeTests

2018-01-28 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 918599ef1 -> 80438924f


HBASE-19879 Promote TestAcidGuaranteesXXX to LargeTests


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/80438924
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/80438924
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/80438924

Branch: refs/heads/master
Commit: 80438924ff5066bf809cf0318168025cf646d415
Parents: 918599e
Author: zhangduo 
Authored: Mon Jan 29 13:04:58 2018 +0800
Committer: zhangduo 
Committed: Mon Jan 29 14:55:04 2018 +0800

--
 .../hadoop/hbase/AcidGuaranteesTestBase.java| 134 +++
 .../TestAcidGuaranteesWithAdaptivePolicy.java   |   6 +-
 .../TestAcidGuaranteesWithBasicPolicy.java  |   6 +-
 .../TestAcidGuaranteesWithEagerPolicy.java  |   6 +-
 ...TestAcidGuaranteesWithNoInMemCompaction.java | 117 +---
 5 files changed, 147 insertions(+), 122 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/80438924/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestBase.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestBase.java
new file mode 100644
index 000..c0aa1a0
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/AcidGuaranteesTestBase.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import static org.apache.hadoop.hbase.AcidGuaranteesTestTool.FAMILIES;
+import static org.apache.hadoop.hbase.AcidGuaranteesTestTool.TABLE_NAME;
+
+import java.util.List;
+import java.util.stream.Stream;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
+import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
+import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+
+/**
+ * Test case that uses multiple threads to read and write multifamily rows into a table, verifying
+ * that reads never see partially-complete writes. This can run as a junit test, or with a main()
+ * function which runs against a real cluster (eg for testing with failures, region movement, etc)
+ */
+public abstract class AcidGuaranteesTestBase {
+
+  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+
+  private AcidGuaranteesTestTool tool = new AcidGuaranteesTestTool();
+
+  protected abstract MemoryCompactionPolicy getMemoryCompactionPolicy();
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    // Set small flush size for minicluster so we exercise reseeking scanners
+    Configuration conf = UTIL.getConfiguration();
+    conf.set(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, String.valueOf(128 * 1024));
+    // prevent aggressive region split
+    conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
+      ConstantSizeRegionSplitPolicy.class.getName());
+    conf.setInt("hfile.format.version", 3); // for mob tests
+    UTIL.startMiniCluster(1);
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    UTIL.shutdownMiniCluster();
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    MemoryCompactionPolicy policy = getMemoryCompactionPolicy();
+    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TABLE_NAME)
+        .setValue(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY, policy.name());
+    if (policy == MemoryCompactionPolicy.EAGER) {

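The per-policy subclasses named in the diffstat are not included in this archive; each one reduces to a size category plus a single override. A minimal sketch of one, assuming the shape implied by the base class above (not the committed file):

  package org.apache.hadoop.hbase;

  import org.apache.hadoop.hbase.testclassification.LargeTests;
  import org.junit.ClassRule;
  import org.junit.experimental.categories.Category;

  // Sketch of a promoted test: the LargeTests category is what HBASE-19879
  // changes, and HBaseClassTestRule keys its timeout off that category.
  @Category(LargeTests.class)
  public class TestAcidGuaranteesWithBasicPolicy extends AcidGuaranteesTestBase {

    @ClassRule
    public static final HBaseClassTestRule CLASS_RULE =
        HBaseClassTestRule.forClass(TestAcidGuaranteesWithBasicPolicy.class);

    @Override
    protected MemoryCompactionPolicy getMemoryCompactionPolicy() {
      return MemoryCompactionPolicy.BASIC;
    }
  }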
[06/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRemoveRegionMetrics.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRemoveRegionMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRemoveRegionMetrics.java
index b007161..27bd45c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRemoveRegionMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRemoveRegionMetrics.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,8 +17,10 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import java.io.IOException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CompatibilityFactory;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -31,21 +33,23 @@ import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.test.MetricsAssertHelper;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
-
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
 
-import java.io.IOException;
-
 @Category({RegionServerTests.class, LargeTests.class})
 public class TestRemoveRegionMetrics {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestRemoveRegionMetrics.class);
+
   private static MiniHBaseCluster cluster;
   private static Configuration conf;
   private static HBaseTestingUtility TEST_UTIL;

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java
index 570422e..0b68754 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestResettingCounters.java
@@ -1,5 +1,4 @@
-/*
- *
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -22,17 +21,18 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -40,6 +40,11 @@ import org.junit.rules.TestName;
 
 @Category({RegionServerTests.class, SmallTests.class})
 public class TestResettingCounters {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestResettingCounters.class);
+
   @Rule
   public TestName name = new TestName();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java 

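Each hunk in this patch makes the same mechanical addition. As a self-contained reference, the resulting pattern looks like the following (class and test names here are hypothetical, not taken from the patch):

  import org.apache.hadoop.hbase.HBaseClassTestRule;
  import org.apache.hadoop.hbase.testclassification.MiscTests;
  import org.apache.hadoop.hbase.testclassification.SmallTests;
  import org.junit.ClassRule;
  import org.junit.Test;
  import org.junit.experimental.categories.Category;

  // The static ClassRule derives a timeout from the test's size category
  // (SmallTests here), replacing the per-instance CategoryBasedTimeout @Rule.
  @Category({ MiscTests.class, SmallTests.class })
  public class TestExampleWithClassRule {

    @ClassRule
    public static final HBaseClassTestRule CLASS_RULE =
        HBaseClassTestRule.forClass(TestExampleWithClassRule.class);

    @Test
    public void testSomething() {
      // body runs under the timeout enforced by CLASS_RULE
    }
  }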
[24/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
--
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
index 106b7e9..d5b25e2 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/TestPerformanceEvaluation.java
@@ -19,17 +19,22 @@ package org.apache.hadoop.hbase;
 
 import static org.junit.Assert.*;
 
+import com.codahale.metrics.Histogram;
+import com.codahale.metrics.Snapshot;
+import com.codahale.metrics.UniformReservoir;
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
+import java.util.LinkedList;
 import java.util.NoSuchElementException;
 import java.util.Queue;
 import java.util.Random;
-import java.util.LinkedList;
-
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -37,19 +42,18 @@ import org.apache.hadoop.hbase.PerformanceEvaluation.RandomReadTest;
 import org.apache.hadoop.hbase.PerformanceEvaluation.TestOptions;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import com.codahale.metrics.Histogram;
-import com.codahale.metrics.Snapshot;
-import com.codahale.metrics.UniformReservoir;
-import com.fasterxml.jackson.core.JsonGenerationException;
-import com.fasterxml.jackson.databind.JsonMappingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
 @Category({MiscTests.class, SmallTests.class})
 public class TestPerformanceEvaluation {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestPerformanceEvaluation.class);
+
   private static final HBaseTestingUtility HTU = new HBaseTestingUtility();
 
   @Test

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
--
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
index fa03a17..327b7af 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestDriver.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,19 +17,25 @@
  */
 package org.apache.hadoop.hbase.mapred;
 
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.MapReduceTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.util.ProgramDriver;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
 
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-
 @Category({MapReduceTests.class, SmallTests.class})
 public class TestDriver {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestDriver.class);
+
   @Test
   public void testDriverMainMethod() throws Throwable {
 ProgramDriver programDriverMock = mock(ProgramDriver.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
--
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
index 584b4fe..12db348 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestGroupingTableMap.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
 

[19/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIIncrementRpcTimeout.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIIncrementRpcTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIIncrementRpcTimeout.java
index 2885d81..87b1481 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIIncrementRpcTimeout.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIIncrementRpcTimeout.java
@@ -18,13 +18,19 @@
 package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.junit.ClassRule;
 import org.junit.experimental.categories.Category;
 
 @Category({ ClientTests.class, MediumTests.class })
 public class TestCIIncrementRpcTimeout extends AbstractTestCIRpcTimeout {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestCIIncrementRpcTimeout.class);
+
   @Override
   protected void execute(Table table) throws IOException {
 table.increment(new Increment(FAM_NAM).addColumn(FAM_NAM, FAM_NAM, 1));

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIPutOperationTimeout.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIPutOperationTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIPutOperationTimeout.java
index 3a17287..8c36bf6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIPutOperationTimeout.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIPutOperationTimeout.java
@@ -18,13 +18,19 @@
 package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.junit.ClassRule;
 import org.junit.experimental.categories.Category;
 
 @Category({ ClientTests.class, LargeTests.class })
 public class TestCIPutOperationTimeout extends AbstractTestCIOperationTimeout {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestCIPutOperationTimeout.class);
+
   @Override
   protected void execute(Table table) throws IOException {
 table.put(new Put(FAM_NAM).addColumn(FAM_NAM, FAM_NAM, FAM_NAM));

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIPutRpcTimeout.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIPutRpcTimeout.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIPutRpcTimeout.java
index b25ed5a..f5921fb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIPutRpcTimeout.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCIPutRpcTimeout.java
@@ -18,13 +18,19 @@
 package org.apache.hadoop.hbase.client;
 
 import java.io.IOException;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.junit.ClassRule;
 import org.junit.experimental.categories.Category;
 
 @Category({ ClientTests.class, MediumTests.class })
 public class TestCIPutRpcTimeout extends AbstractTestCIRpcTimeout {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestCIPutRpcTimeout.class);
+
   @Override
   protected void execute(Table table) throws IOException {
 table.put(new Put(FAM_NAM).addColumn(FAM_NAM, FAM_NAM, FAM_NAM));

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCISleep.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCISleep.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCISleep.java
index 5b3af33..761922a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCISleep.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCISleep.java
@@ -22,6 +22,7 @@ import static org.junit.Assert.fail;
 
 import java.net.SocketTimeoutException;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HConstants;
 

[10/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
index 214fe49..d41291e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namespace/TestNamespaceAuditor.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,6 +17,13 @@
  */
 package org.apache.hadoop.hbase.namespace;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
@@ -26,14 +32,13 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
-
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -79,24 +84,19 @@ import org.apache.zookeeper.KeeperException;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Rule;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TestRule;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 @Category(MediumTests.class)
 public class TestNamespaceAuditor {
-  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().
-  withTimeout(this.getClass()).withLookingForStuckThread(true).build();
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestNamespaceAuditor.class);
+
   private static final Logger LOG = LoggerFactory.getLogger(TestNamespaceAuditor.class);
   private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
   private static Admin ADMIN;

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.java
index 740caea..7b848f2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.java
@@ -1,18 +1,19 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or 

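Consolidating the TestNamespaceAuditor hunk above, the old and new timeout mechanisms side by side (both fragments appear in the diff):

  // Before: an instance-level rule built from the class's category.
  @Rule
  public final TestRule timeout = CategoryBasedTimeout.builder()
      .withTimeout(this.getClass()).withLookingForStuckThread(true).build();

  // After: a single static, class-level rule, enforced once per class.
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestNamespaceAuditor.class);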
[30/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/918599ef
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/918599ef
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/918599ef

Branch: refs/heads/master
Commit: 918599ef12639be6e2e19dcb5af8d79b7721d970
Parents: 851e179
Author: zhangduo 
Authored: Sun Jan 28 19:52:09 2018 +0800
Committer: zhangduo 
Committed: Mon Jan 29 08:43:56 2018 +0800

--
 ReorderImports.class| Bin 0 -> 5984 bytes
 .../exemplars/client/TestHelloHBase.java|   8 +-
 .../exemplars/shaded_client/TestHelloHBase.java |   8 +-
 .../hbase/backup/TestBackupBoundaryTests.java   |  10 ++-
 .../hbase/backup/TestBackupCommandLineTool.java |   7 +-
 .../hadoop/hbase/backup/TestBackupDelete.java   |  18 ++--
 .../hbase/backup/TestBackupDeleteRestore.java   |  33 ---
 .../backup/TestBackupDeleteWithFailures.java|  12 ++-
 .../hadoop/hbase/backup/TestBackupDescribe.java |  18 ++--
 .../hbase/backup/TestBackupHFileCleaner.java|   8 +-
 .../hbase/backup/TestBackupMultipleDeletes.java |  18 ++--
 .../hadoop/hbase/backup/TestBackupRepair.java   |  16 ++--
 .../hbase/backup/TestBackupShowHistory.java |  18 ++--
 .../hbase/backup/TestBackupSmallTests.java  |  17 ++--
 .../hbase/backup/TestBackupStatusProgress.java  |  18 ++--
 .../hbase/backup/TestBackupSystemTable.java |   9 +-
 .../hadoop/hbase/backup/TestFullBackup.java |  15 ++--
 .../hadoop/hbase/backup/TestFullBackupSet.java  |  17 ++--
 .../backup/TestFullBackupSetRestoreSet.java |  16 ++--
 .../backup/TestFullBackupWithFailures.java  |  15 ++--
 .../hadoop/hbase/backup/TestFullRestore.java|  33 ---
 .../hbase/backup/TestIncrementalBackup.java |  18 ++--
 .../TestIncrementalBackupDeleteTable.java   |  18 ++--
 .../TestIncrementalBackupMergeWithFailures.java |  16 ++--
 .../TestIncrementalBackupWithBulkLoad.java  |  17 ++--
 .../TestIncrementalBackupWithFailures.java  |  18 ++--
 .../hadoop/hbase/backup/TestRemoteBackup.java   |  33 ---
 .../hadoop/hbase/backup/TestRemoteRestore.java  |  31 +--
 .../backup/TestRepairAfterFailedDelete.java |  18 ++--
 .../hbase/backup/TestRestoreBoundaryTests.java  |   9 +-
 .../hbase/backup/TestSystemTableSnapshot.java   |   8 +-
 .../backup/master/TestBackupLogCleaner.java |  18 ++--
 .../hadoop/hbase/TestHColumnDescriptor.java |   6 ++
 .../hadoop/hbase/TestHTableDescriptor.java  |   7 +-
 .../hbase/TestInterfaceAudienceAnnotations.java |  17 ++--
 .../hadoop/hbase/TestRegionLocations.java   |   6 +-
 .../hadoop/hbase/client/TestAsyncProcess.java   |  15 ++--
 .../hadoop/hbase/client/TestAttributes.java |  10 ++-
 .../hbase/client/TestBufferedMutator.java   |   8 +-
 .../hbase/client/TestBufferedMutatorParams.java |  11 ++-
 .../client/TestClientExponentialBackoff.java|  15 +++-
 .../hbase/client/TestClientNoCluster.java   |  10 ++-
 .../hadoop/hbase/client/TestClientScanner.java  |   7 +-
 .../TestColumnFamilyDescriptorBuilder.java  |   6 ++
 .../hadoop/hbase/client/TestDelayingRunner.java |   9 +-
 .../hbase/client/TestDeleteTimeStamp.java   |  32 ---
 .../org/apache/hadoop/hbase/client/TestGet.java |  17 ++--
 .../client/TestHTableMultiplexerViaMocks.java   |  38 
 .../client/TestImmutableHColumnDescriptor.java  |  11 ++-
 .../hbase/client/TestImmutableHRegionInfo.java  |   9 +-
 .../client/TestImmutableHTableDescriptor.java   |  11 ++-
 .../hadoop/hbase/client/TestIncrement.java  |   9 +-
 .../hadoop/hbase/client/TestInterfaceAlign.java |   9 +-
 .../hbase/client/TestMetricsConnection.java |  38 
 .../hadoop/hbase/client/TestMutation.java   |   8 +-
 .../hadoop/hbase/client/TestOperation.java  |  10 ++-
 .../hbase/client/TestProcedureFuture.java   |  15 ++--
 .../hadoop/hbase/client/TestPutDotHas.java  |   8 +-
 .../hbase/client/TestRegionInfoDisplay.java |  15 ++--
 ...estRetriesExhaustedWithDetailsException.java |  30 +++
 .../client/TestReversedScannerCallable.java |   6 ++
 .../hadoop/hbase/client/TestRowComparator.java  |   7 ++
 .../apache/hadoop/hbase/client/TestScan.java|  18 ++--
 .../client/TestSimpleRequestController.java |   9 +-
 .../hbase/client/TestSnapshotFromAdmin.java |  13 ++-
 .../client/TestTableDescriptorBuilder.java  |  10 ++-
 .../exceptions/TestClientExceptionsUtil.java|  15 ++--
 .../hadoop/hbase/filter/TestComparators.java|   9 +-
 .../hadoop/hbase/filter/TestKeyOnlyFilter.java  |   7 +-
 .../hadoop/hbase/filter/TestLongComparator.java |  16 ++--
 .../hadoop/hbase/ipc/TestCellBlockBuilder.java  |   7 +-
 .../hadoop/hbase/ipc/TestFailedServersLog.java  |  32 ---
 .../hbase/ipc/TestHBaseRpcControllerImpl.java   

[02/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
index 965b399..e713a5a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java
@@ -15,10 +15,24 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.tool;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.mockito.ArgumentMatchers.argThat;
+import static org.mockito.Matchers.anyLong;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Matchers.isA;
+import static org.mockito.Mockito.atLeastOnce;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.ScheduledThreadPoolExecutor;
 import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
@@ -28,11 +42,11 @@ import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.Appender;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.spi.LoggingEvent;
-import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
 import org.junit.After;
 import org.junit.Before;
-import org.junit.Rule;
+import org.junit.ClassRule;
 import org.junit.Ignore;
+import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
@@ -41,25 +55,16 @@ import org.mockito.ArgumentMatcher;
 import org.mockito.Mock;
 import org.mockito.runners.MockitoJUnitRunner;
 
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.ScheduledThreadPoolExecutor;
-
-import static org.junit.Assert.assertNotEquals;
-import static org.mockito.ArgumentMatchers.argThat;
-import static org.mockito.Matchers.anyLong;
-import static org.mockito.Matchers.eq;
-import static org.mockito.Matchers.isA;
-import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.atLeastOnce;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
+import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
 
 @RunWith(MockitoJUnitRunner.class)
 @Category({MediumTests.class})
 public class TestCanaryTool {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestCanaryTool.class);
+
   private HBaseTestingUtility testingUtility;
   private static final byte[] FAMILY = Bytes.toBytes("f");
   private static final byte[] COLUMN = Bytes.toBytes("col");

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
index a3ca323..e744394 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -22,6 +21,7 @@ import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
+
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -34,6 +34,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
@@ -51,7 +52,6 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import 

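The import reshuffles in these hunks all normalize to the same layout, visible in the TestCanaryTool and TestPerformanceEvaluation diffs above: static imports first, then one alphabetized block (com.*, java.*, org.*), with the shaded org.apache.hbase.thirdparty.* imports set apart at the end. A short sketch of the resulting order (the specific imports are arbitrary):

  import static org.junit.Assert.assertEquals;

  import com.fasterxml.jackson.databind.ObjectMapper;
  import java.io.IOException;
  import org.apache.hadoop.hbase.HBaseClassTestRule;
  import org.junit.ClassRule;

  import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;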
[16/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
index 09aa4ff..df80fa0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -35,6 +35,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
@@ -67,6 +68,7 @@ import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -81,6 +83,11 @@ import org.slf4j.LoggerFactory;
  */
 @Category({CoprocessorTests.class, MediumTests.class})
 public class TestWALObserver {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestWALObserver.class);
+
   private static final Logger LOG = LoggerFactory.getLogger(TestWALObserver.class);
   private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
index 8ec1a44..8861a69 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionDispatcher.java
@@ -20,8 +20,10 @@ package org.apache.hadoop.hbase.errorhandling;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
@@ -34,6 +36,11 @@ import org.slf4j.LoggerFactory;
  */
 @Category({MasterTests.class, SmallTests.class})
 public class TestForeignExceptionDispatcher {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestForeignExceptionDispatcher.class);
+
   private static final Logger LOG = LoggerFactory.getLogger(TestForeignExceptionDispatcher.class);
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java
index 2ab534a..127e72f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/errorhandling/TestForeignExceptionSerialization.java
@@ -23,9 +23,10 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
-
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -34,11 +35,15 @@ import org.junit.experimental.categories.Category;
  */
 @Category({MasterTests.class, SmallTests.class})
 public class TestForeignExceptionSerialization {
+
+  @ClassRule
+  

[25/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
--
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
index 77f9899..b28db11 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestRefreshHFilesEndpoint.java
@@ -6,16 +6,15 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.coprocessor.example;
 
 import static org.junit.Assert.assertEquals;
@@ -24,10 +23,10 @@ import static org.junit.Assert.fail;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
@@ -51,6 +50,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.HFileTestUtil;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.junit.After;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
@@ -58,6 +58,11 @@ import org.slf4j.LoggerFactory;
 
 @Category(MediumTests.class)
 public class TestRefreshHFilesEndpoint {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestRefreshHFilesEndpoint.class);
+
   private static final Logger LOG = LoggerFactory.getLogger(TestRefreshHFilesEndpoint.class);
   private static final HBaseTestingUtility HTU = new HBaseTestingUtility();
   private static final int NUM_MASTER = 1;

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestScanModifyingObserver.java
--
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestScanModifyingObserver.java b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestScanModifyingObserver.java
index d5d0ba1..f90a0f4 100644
--- a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestScanModifyingObserver.java
+++ b/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestScanModifyingObserver.java
@@ -25,7 +25,7 @@ import static org.junit.Assert.assertNull;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
@@ -41,12 +41,17 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category({ CoprocessorTests.class, MediumTests.class })
 public class TestScanModifyingObserver {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestScanModifyingObserver.class);
+
   private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
   private static final TableName NAME = TableName.valueOf("TestScanModifications");
   private static final byte[] FAMILY = Bytes.toBytes("f");

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestValueReplacingCompaction.java
--
diff --git a/hbase-examples/src/test/java/org/apache/hadoop/hbase/coprocessor/example/TestValueReplacingCompaction.java 
 

[08/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
index ec714be..52d6aae 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionKeyRotation.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -26,11 +26,10 @@ import java.security.SecureRandom;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-
 import javax.crypto.spec.SecretKeySpec;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -51,6 +50,7 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -60,6 +60,11 @@ import org.slf4j.LoggerFactory;
 
 @Category({RegionServerTests.class, MediumTests.class})
 public class TestEncryptionKeyRotation {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestEncryptionKeyRotation.class);
+
   private static final Logger LOG = LoggerFactory.getLogger(TestEncryptionKeyRotation.class);
   private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private static final Configuration conf = TEST_UTIL.getConfiguration();

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
index 42ef533..eef0b90 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEncryptionRandomKeying.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -23,9 +23,9 @@ import static org.junit.Assert.assertTrue;
 import java.security.Key;
 import java.util.ArrayList;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -42,11 +42,17 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category({RegionServerTests.class, MediumTests.class})
 public class TestEncryptionRandomKeying {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestEncryptionRandomKeying.class);
+
   private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private static Configuration conf = TEST_UTIL.getConfiguration();
   private static HTableDescriptor htd;

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
index a770f8c..2c12341 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
@@ -28,10 +28,10 @@ import java.util.Random;
 import 

[12/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestShutdownBackupMaster.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestShutdownBackupMaster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestShutdownBackupMaster.java
index 02d7f2f..91108ae 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestShutdownBackupMaster.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestShutdownBackupMaster.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertNotNull;
 import java.io.IOException;
 import java.util.concurrent.CountDownLatch;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CategoryBasedTimeout;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
@@ -32,10 +32,9 @@ import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
 import org.apache.zookeeper.KeeperException;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Rule;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TestRule;
 
 /**
  * Test to confirm that we will not hang when stop a backup master which is trying to become the
@@ -43,8 +42,10 @@ import org.junit.rules.TestRule;
  */
 @Category({ MasterTests.class, MediumTests.class })
 public class TestShutdownBackupMaster {
-  @Rule public final TestRule timeout = CategoryBasedTimeout.builder().withTimeout(this.getClass()).
-  withLookingForStuckThread(true).build();
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestShutdownBackupMaster.class);
 
   private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
index 43a28ad..f944108 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestSplitLogManager.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -39,11 +38,11 @@ import java.io.IOException;
 import java.util.Map;
 import java.util.UUID;
 import java.util.concurrent.atomic.LongAdder;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CoordinatedStateManager;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.ServerName;
@@ -66,6 +65,7 @@ import org.apache.zookeeper.ZooDefs.Ids;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.mockito.Mockito;
@@ -74,6 +74,11 @@ import org.slf4j.LoggerFactory;
 
 @Category({MasterTests.class, MediumTests.class})
 public class TestSplitLogManager {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestSplitLogManager.class);
+
  private static final Logger LOG = LoggerFactory.getLogger(TestSplitLogManager.class);
 
   private final ServerManager sm = Mockito.mock(ServerManager.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java
index 5a75297..fa054b4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableStateManager.java
@@ -1,6 +1,4 @@
-/*
- * Copyright The Apache Software Foundation
- *

[01/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
Repository: hbase
Updated Branches:
  refs/heads/master 851e17987 -> 918599ef1


http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
index 3cf65c5..3a39ee9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -33,6 +32,7 @@ import java.util.concurrent.ThreadLocalRandom;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionInfoBuilder;
@@ -44,6 +44,7 @@ import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
@@ -56,6 +57,11 @@ import org.slf4j.LoggerFactory;
 @RunWith(Parameterized.class)
 @Category({ RegionServerTests.class, LargeTests.class })
 public class TestBoundedRegionGroupingStrategy {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestBoundedRegionGroupingStrategy.class);
+
   private static final Logger LOG =
   LoggerFactory.getLogger(TestBoundedRegionGroupingStrategy.class);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java
index d9ee9eb..5aea0cf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -33,6 +32,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.ServerName;
@@ -43,7 +43,6 @@ import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
-// imports for things that haven't moved from regionserver.wal yet.
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -51,6 +50,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -60,6 +60,11 @@ import org.slf4j.LoggerFactory;
 
 @Category({RegionServerTests.class, MediumTests.class})
 public class TestFSHLogProvider {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestFSHLogProvider.class);
+
  private static final Logger LOG = LoggerFactory.getLogger(TestFSHLogProvider.class);
 
   private static Configuration conf;

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
index b4160e9..814320a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
+++ 

[15/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
index 5ba7dfa..59d2229 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.fs;
 
 import java.io.FileNotFoundException;
@@ -27,7 +26,6 @@ import java.net.BindException;
 import java.net.ServerSocket;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -35,6 +33,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
@@ -65,6 +64,7 @@ import org.apache.hadoop.ipc.RemoteException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -77,6 +77,11 @@ import org.slf4j.LoggerFactory;
  */
 @Category({MiscTests.class, LargeTests.class})
 public class TestBlockReorder {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestBlockReorder.class);
+
  private static final Logger LOG = LoggerFactory.getLogger(TestBlockReorder.class);
 
   private Configuration conf;

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java
index f5101c7..ef62753 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestByteBufferOutputStream.java
@@ -21,14 +21,20 @@ import static org.junit.Assert.*;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
-
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category(SmallTests.class)
 public class TestByteBufferOutputStream {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestByteBufferOutputStream.class);
+
   @Test
   public void testByteBufferReuse() throws IOException {
 byte [] someBytes = Bytes.toBytes("some bytes");

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
index eed19a0..8796068 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestFileLink.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.io;
 
 import static org.junit.Assert.assertEquals;
@@ -27,12 +26,12 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -40,6 +39,7 @@ import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import 

[27/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java
--
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java
new file mode 100644
index 0000000..74bd70e
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRule.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.util.concurrent.TimeUnit;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestRule;
+import org.junit.rules.Timeout;
+import org.junit.runner.Description;
+import org.junit.runners.model.Statement;
+
+/**
+ * The class level TestRule for all the tests. Every test class should have a {@code ClassRule} with
+ * it.
+ * <p>
+ * For now it only sets a test method timeout based off the test categories small, medium, large.
+ * Based on junit Timeout TestRule; see https://github.com/junit-team/junit/wiki/Rules
+ */
+@InterfaceAudience.Private
+public final class HBaseClassTestRule implements TestRule {
+
+  private final Class<?> clazz;
+
+  private final Timeout timeout;
+
+  private HBaseClassTestRule(Class<?> clazz, Timeout timeout) {
+    this.clazz = clazz;
+    this.timeout = timeout;
+  }
+
+  /**
+   * Mainly used for {@link HBaseClassTestRuleChecker} to confirm that we use the correct
+   * class to generate timeout ClassRule.
+   */
+  public Class<?> getClazz() {
+    return clazz;
+  }
+
+  private static long getTimeoutInSeconds(Class<?> clazz) {
+    Category[] categories = clazz.getAnnotationsByType(Category.class);
+    if (categories.length == 0) {
+      throw new IllegalArgumentException(clazz.getName() + " is not annotated with @Category");
+    }
+    for (Class<?> c : categories[0].value()) {
+      if (c == SmallTests.class) {
+        // See SmallTests. Supposed to run 15 seconds.
+        return 30;
+      } else if (c == MediumTests.class) {
+        // See MediumTests. Supposed to run 50 seconds.
+        return 180;
+      } else if (c == LargeTests.class) {
+        // Let large tests have a ten minute timeout.
+        return TimeUnit.MINUTES.toSeconds(10);
+      }
+    }
+    throw new IllegalArgumentException(
+        clazz.getName() + " does not have SmallTests/MediumTests/LargeTests in @Category");
+  }
+
+  public static HBaseClassTestRule forClass(Class<?> clazz) {
+    return new HBaseClassTestRule(clazz, Timeout.builder().withLookingForStuckThread(true)
+        .withTimeout(getTimeoutInSeconds(clazz), TimeUnit.SECONDS).build());
+  }
+
+  @Override
+  public Statement apply(Statement base, Description description) {
+    return timeout.apply(base, description);
+  }
+}
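
As a usage sketch of the rule above (the test class and test body here are hypothetical; HBaseClassTestRule and the testclassification categories are the real ones from this patch), a class annotated with SmallTests picks up a 30 second per-class timeout, MediumTests 180 seconds, and LargeTests ten minutes:

import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

// Hypothetical test class: @Category(SmallTests.class) is what
// getTimeoutInSeconds() inspects, so forClass() builds a 30 second Timeout.
@Category(SmallTests.class)
public class ExampleSmallTest {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(ExampleSmallTest.class);

  @Test
  public void testSomething() {
    // assertions elided
  }
}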

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRuleChecker.java
--
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRuleChecker.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRuleChecker.java
new file mode 100644
index 0000000..97c657f
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseClassTestRuleChecker.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or 

[29/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupMultipleDeletes.java
--
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupMultipleDeletes.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupMultipleDeletes.java
index 3d1997b..db1a4e2 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupMultipleDeletes.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupMultipleDeletes.java
@@ -1,13 +1,13 @@
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
+ * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
+ * regarding copyright ownership.  The ASF licenses this file
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
+ * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.backup;
 
 import static org.junit.Assert.assertEquals;
@@ -24,7 +23,7 @@ import static org.junit.Assert.assertTrue;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.impl.BackupAdminImpl;
 import org.apache.hadoop.hbase.client.Connection;
@@ -35,10 +34,12 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Assert;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 
 /**
@@ -46,6 +47,11 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
  */
 @Category(LargeTests.class)
 public class TestBackupMultipleDeletes extends TestBackupBase {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestBackupMultipleDeletes.class);
+
  private static final Logger LOG = LoggerFactory.getLogger(TestBackupMultipleDeletes.class);
 
   @Test

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupRepair.java
--
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupRepair.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupRepair.java
index 4c76476..a036989 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupRepair.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupRepair.java
@@ -1,13 +1,13 @@
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
+ * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
+ * regarding copyright ownership.  The ASF licenses this file
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
+ * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -22,22 +22,26 @@ import static org.junit.Assert.assertTrue;
 
 import java.util.List;
 import java.util.Set;
-
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
 import org.apache.hadoop.hbase.backup.impl.TableBackupClient;
 import org.apache.hadoop.hbase.backup.impl.TableBackupClient.Stage;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.util.ToolRunner;
+import org.junit.ClassRule;
 import 

[21/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableRegionModel.java
--
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableRegionModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableRegionModel.java
index ea49515..4285c9b 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableRegionModel.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableRegionModel.java
@@ -1,5 +1,4 @@
-/*
- *
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -16,23 +15,28 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.rest.model;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
-
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category({RestTests.class, SmallTests.class})
public class TestTableRegionModel extends TestModelBase<TableRegionModel> {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestTableRegionModel.class);
+
   private static final String TABLE = "testtable";
   private static final byte[] START_KEY = Bytes.toBytes("abracadbra");
   private static final byte[] END_KEY = Bytes.toBytes("zzyzx");
@@ -67,7 +71,7 @@ public class TestTableRegionModel extends TestModelBase<TableRegionModel> {
 assertTrue(Bytes.equals(model.getEndKey(), END_KEY));
 assertEquals(ID, model.getId());
 assertEquals(LOCATION, model.getLocation());
-assertEquals(model.getName(), 
+assertEquals(model.getName(),
   TABLE + "," + Bytes.toString(START_KEY) + "," + Long.toString(ID) +
   ".ad9860f031282c46ed431d7af8f94aca.");
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
--
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
index 55d149a..6b50ab7 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestTableSchemaModel.java
@@ -1,5 +1,4 @@
-/*
- *
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -16,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.rest.model;
 
 import static org.junit.Assert.assertEquals;
@@ -24,10 +22,10 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import java.util.Iterator;
-
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
@@ -35,6 +33,11 @@ import org.slf4j.LoggerFactory;
 
 @Category({RestTests.class, SmallTests.class})
public class TestTableSchemaModel extends TestModelBase<TableSchemaModel> {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestTableSchemaModel.class);
+
  private static final Logger LOG = LoggerFactory.getLogger(TestTableSchemaModel.class);
 
   public static final String TABLE_NAME = "testTable";

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java
--
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java
index c3e7a4c..b352950 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/model/TestVersionModel.java
@@ -1,5 +1,4 @@
-/*
- *
+/**
  * Licensed to the 

[14/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
index d0dc6a3..27f9b7a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileDataBlockEncoder.java
@@ -1,18 +1,19 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 package org.apache.hadoop.hbase.io.hfile;
 
@@ -26,8 +27,8 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
@@ -40,6 +41,7 @@ import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.ChecksumType;
 import org.apache.hadoop.hbase.util.RedundantKVGenerator;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
@@ -49,6 +51,11 @@ import org.junit.runners.Parameterized.Parameters;
 @RunWith(Parameterized.class)
 @Category({IOTests.class, SmallTests.class})
 public class TestHFileDataBlockEncoder {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestHFileDataBlockEncoder.class);
+
   private HFileDataBlockEncoder blockEncoder;
   private RedundantKVGenerator generator = new RedundantKVGenerator();
   private boolean includesMemstoreTS;
@@ -192,7 +199,7 @@ public class TestHFileDataBlockEncoder {
 .withChecksumType(ChecksumType.NULL)
 .build();
 HFileBlock b = new HFileBlock(BlockType.DATA, size, size, -1, buf,
-HFileBlock.FILL_HEADER, 0, 
+HFileBlock.FILL_HEADER, 0,
  0, -1, meta);
 return b;
   }
@@ -243,7 +250,7 @@ public class TestHFileDataBlockEncoder {
 
 for (DataBlockEncoding diskAlgo : DataBlockEncoding.values()) {
   for (boolean includesMemstoreTS : new boolean[] { false, true }) {
-HFileDataBlockEncoder dbe = (diskAlgo == DataBlockEncoding.NONE) ? 
+HFileDataBlockEncoder dbe = (diskAlgo == DataBlockEncoding.NONE) ?
 NoOpDataBlockEncoder.INSTANCE : new HFileDataBlockEncoderImpl(diskAlgo);
 configurations.add(new Object[] { dbe, new Boolean(includesMemstoreTS) });
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
index a049b329..134bb62 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileEncryption.java
+++ 

[26/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
--
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
index 9ee356f..3fc1c23 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferArray.java
@@ -23,16 +23,21 @@ import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
-
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category({MiscTests.class, SmallTests.class})
 public class TestByteBufferArray {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestByteBufferArray.class);
+
   @Test
   public void testAsSubBufferWhenEndOffsetLandInLastBuffer() throws Exception {
 int capacity = 4 * 1024 * 1024;

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
--
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
index f4687fa..997a0bb 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteBufferUtils.java
@@ -1,18 +1,19 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 package org.apache.hadoop.hbase.util;
 
@@ -43,7 +44,7 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
-
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -51,6 +52,7 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.io.WritableUtils;
 import org.junit.AfterClass;
 import org.junit.Before;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
@@ -59,6 +61,11 @@ import org.junit.runners.Parameterized;
 @Category({MiscTests.class, SmallTests.class})
 @RunWith(Parameterized.class)
 public class TestByteBufferUtils {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestByteBufferUtils.class);
+
   private static final String UNSAFE_AVAIL_NAME = "UNSAFE_AVAIL";
   private static final String UNSAFE_UNALIGNED_NAME = "UNSAFE_UNALIGNED";
   private byte[] array;

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestByteRangeWithKVSerialization.java

[04/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java
index 2837045..643b629 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java
@@ -1,5 +1,4 @@
-/*
- *
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,6 +17,9 @@
  */
 package org.apache.hadoop.hbase.replication;
 
+import static org.junit.Assert.fail;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.client.Result;
@@ -25,15 +27,18 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.ReplicationTests;
+import org.junit.ClassRule;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.fail;
-
 @Category({ReplicationTests.class, LargeTests.class})
 public class TestReplicationKillRS extends TestReplicationBase {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestReplicationKillRS.class);
+
  private static final Logger LOG = LoggerFactory.getLogger(TestReplicationKillRS.class);
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillSlaveRS.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillSlaveRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillSlaveRS.java
index 6a824d0..15f667b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillSlaveRS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillSlaveRS.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -17,8 +17,10 @@
  */
 package org.apache.hadoop.hbase.replication;
 
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.ReplicationTests;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
@@ -32,6 +34,10 @@ import org.junit.runners.Parameterized;
 @Category({ReplicationTests.class, LargeTests.class})
 public class TestReplicationKillSlaveRS extends TestReplicationKillRS {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestReplicationKillSlaveRS.class);
+
   @Test(timeout=30)
   public void killOneSlaveRS() throws Exception {
 loadTableAndKillRS(utility2);

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.java
index ab35b46..a2ae0b4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationProcedureRetry.java
@@ -27,8 +27,8 @@ import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.spy;
 
 import java.io.IOException;
-
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Admin;
@@ -40,6 +40,7 @@ import org.apache.zookeeper.KeeperException;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 

[18/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
index 43f2c49..fbf1eb0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultiRespectsLimits.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.client;
 
 import static junit.framework.TestCase.assertEquals;
@@ -23,11 +22,11 @@ import static junit.framework.TestCase.assertEquals;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.ThreadLocalRandom;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellBuilderFactory;
 import org.apache.hadoop.hbase.CellBuilderType;
 import org.apache.hadoop.hbase.CompatibilityFactory;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -44,6 +43,7 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -55,6 +55,11 @@ import org.junit.rules.TestName;
  */
 @Category({MediumTests.class, ClientTests.class})
 public class TestMultiRespectsLimits {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestMultiRespectsLimits.class);
+
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private static final MetricsAssertHelper METRICS_ASSERT =
   CompatibilityFactory.getInstance(MetricsAssertHelper.class);

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
index d73ebc0..ac6b9d1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMultipleTimestamps.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -24,8 +23,8 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
-
 import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -33,6 +32,7 @@ import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -47,6 +47,11 @@ import org.slf4j.LoggerFactory;
  */
 @Category({LargeTests.class, ClientTests.class})
 public class TestMultipleTimestamps {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestMultipleTimestamps.class);
+
  private static final Logger LOG = LoggerFactory.getLogger(TestMultipleTimestamps.class);
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMvccConsistentScanner.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMvccConsistentScanner.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestMvccConsistentScanner.java
index 

[28/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java
--
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java
index 6a2bb39..f566223 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -16,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.client;
 
 import static org.junit.Assert.assertEquals;
@@ -27,20 +25,18 @@ import static org.junit.Assert.fail;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.lang.reflect.InvocationTargetException;
+import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Set;
-
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.security.visibility.Authorizations;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -48,12 +44,21 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Base64;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Assert;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
+
 // TODO: cover more test cases
 @Category({ClientTests.class, SmallTests.class})
 public class TestGet {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestGet.class);
+
   private static final byte [] ROW = new byte [] {'r'};
 
  private static final String PB_GET = "CgNyb3ciEwoPdGVzdC5Nb2NrRmlsdGVyEgAwATgB";

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerViaMocks.java
--
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerViaMocks.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerViaMocks.java
index ef59eed..cce4939 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerViaMocks.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableMultiplexerViaMocks.java
@@ -1,12 +1,13 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -16,14 +17,6 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import java.io.IOException;
-
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyInt;
 

[13/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcMetrics.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcMetrics.java
index cfcfb21..74827b9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcMetrics.java
@@ -1,5 +1,4 @@
-/*
- *
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -8,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -16,26 +15,31 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.ipc;
 
+import static org.junit.Assert.*;
+
 import org.apache.hadoop.hbase.CompatibilityFactory;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.RegionTooBusyException;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
 import org.apache.hadoop.hbase.exceptions.RegionMovedException;
+import org.apache.hadoop.hbase.test.MetricsAssertHelper;
 import org.apache.hadoop.hbase.testclassification.RPCTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.test.MetricsAssertHelper;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-
-import static org.junit.Assert.*;
-
 @Category({RPCTests.class, SmallTests.class})
 public class TestRpcMetrics {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestRpcMetrics.class);
+
  public MetricsAssertHelper HELPER = CompatibilityFactory.getInstance(MetricsAssertHelper.class);
 
   @Test

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServer.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServer.java
index 0f10c49..560190b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServer.java
@@ -24,7 +24,7 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 
 import java.nio.ByteBuffer;
-
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.io.ByteBufferPool;
 import org.apache.hadoop.hbase.ipc.RpcServer.CallCleanup;
 import org.apache.hadoop.hbase.nio.ByteBuff;
@@ -33,12 +33,17 @@ import org.apache.hadoop.hbase.nio.SingleByteBuff;
 import org.apache.hadoop.hbase.testclassification.RPCTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Pair;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category({ RPCTests.class, SmallTests.class })
 public class TestRpcServer {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestRpcServer.class);
+
   @Test
   public void testAllocateByteBuffToReadInto() throws Exception {
 int maxBuffersInPool = 10;

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerSlowConnectionSetup.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerSlowConnectionSetup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerSlowConnectionSetup.java
index e05db33..aedf57e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerSlowConnectionSetup.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ipc/TestRpcServerSlowConnectionSetup.java
@@ -28,25 +28,18 @@ import java.net.InetSocketAddress;
 import java.net.Socket;
 import java.util.Arrays;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import 

[11/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
--
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
index e54eb66..056155f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestCreateTableProcedure.java
@@ -15,13 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.master.procedure;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.testclassification.MasterTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ModifyRegionUtils;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -44,6 +45,11 @@ import org.slf4j.LoggerFactory;
 
 @Category({MasterTests.class, MediumTests.class})
 public class TestCreateTableProcedure extends TestTableDDLProcedureBase {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestCreateTableProcedure.class);
+
   private static final Logger LOG = 
LoggerFactory.getLogger(TestCreateTableProcedure.class);
 
   private static final String F1 = "f1";
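
HBaseClassTestRule.forClass(...) receives the concrete class literal, as in the hunk above. A simplified, hypothetical sketch of what a category-based factory like that can do with it; the marker names and second values here are invented for the example and are not HBase's real numbers:

    import java.util.concurrent.TimeUnit;
    import org.junit.experimental.categories.Category;
    import org.junit.rules.Timeout;

    public final class CategoryTimeouts {

      private CategoryTimeouts() {
      }

      // Hypothetical mapping from category marker to time budget; the
      // real mapping lives inside HBaseClassTestRule, not here.
      public static Timeout forClass(Class<?> testClass) {
        long seconds = 180; // assumed default for small tests
        Category category = testClass.getAnnotation(Category.class);
        if (category != null) {
          for (Class<?> marker : category.value()) {
            if (marker.getSimpleName().equals("MediumTests")) {
              seconds = Math.max(seconds, 540);
            } else if (marker.getSimpleName().equals("LargeTests")) {
              seconds = Math.max(seconds, 780);
            }
          }
        }
        return Timeout.builder().withTimeout(seconds, TimeUnit.SECONDS).build();
      }
    }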

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedureFromClient.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedureFromClient.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedureFromClient.java
index 899d114..d5ec62d 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedureFromClient.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteColumnFamilyProcedureFromClient.java
@@ -1,6 +1,4 @@
 /**
- * Copyright The Apache Software Foundation
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -24,11 +22,11 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
-
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -47,11 +45,17 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category({MasterTests.class, LargeTests.class})
 public class TestDeleteColumnFamilyProcedureFromClient {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  
HBaseClassTestRule.forClass(TestDeleteColumnFamilyProcedureFromClient.class);
+
   private static final HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();
 
   private static final TableName TABLENAME =

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java
index fa2507e..10dca9d 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestDeleteNamespaceProcedure.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and

[23/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java
index 699e773..5fa4b54 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatBase.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -23,13 +22,19 @@ import static org.junit.Assert.*;
 import java.net.Inet6Address;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
-
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category({SmallTests.class})
 public class TestTableInputFormatBase {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestTableInputFormatBase.class);
+
   @Test
   public void testTableInputFormatBaseReverseDNSForIPv6()
   throws UnknownHostException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java
index 553869e..e76f5ad 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan1.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -19,9 +18,10 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
-
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -32,6 +32,10 @@ import org.junit.experimental.categories.Category;
 @Category({VerySlowMapReduceTests.class, LargeTests.class})
 public class TestTableInputFormatScan1 extends TestTableInputFormatScanBase {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestTableInputFormatScan1.class);
+
   /**
* Tests a MR scan using specific start and stop rows.
*

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan2.java
--
diff --git 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan2.java
 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan2.java
index 02f893f..83158d8 100644
--- 
a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan2.java
+++ 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScan2.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -19,9 +18,10 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
-
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -32,6 +32,10 @@ import org.junit.experimental.categories.Category;
 @Category({VerySlowMapReduceTests.class, LargeTests.class})
 public class TestTableInputFormatScan2 extends TestTableInputFormatScanBase {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestTableInputFormatScan2.class);
+
   /**
* Tests a MR scan using specific start and stop rows.
*
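
Both scan tests above extend one base class, yet each declares its own CLASS_RULE. A sketch of that pattern with invented names: since the rule is built from the concrete class literal, it lives in the class that actually runs, not in the shared base:

    import org.junit.ClassRule;
    import org.junit.Test;
    import org.junit.rules.Timeout;

    abstract class ScanTestSketchBase {
      // shared fixture and helpers would live here in the real tests
      void runScan(String startRow, String stopRow) {
      }
    }

    public class ScanTestSketchPartOne extends ScanTestSketchBase {

      // Declared on the concrete class so the rule (and, in HBase's
      // case, the category lookup inside forClass) sees the class
      // being run. The timeout value is illustrative.
      @ClassRule
      public static final Timeout CLASS_TIMEOUT = Timeout.seconds(600);

      @Test
      public void scansFirstRange() {
        runScan("aaa", "mmm");
      }
    }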


[05/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestExplicitColumnTracker.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestExplicitColumnTracker.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestExplicitColumnTracker.java
index 4e07f80..e87dfd8 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestExplicitColumnTracker.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestExplicitColumnTracker.java
@@ -1,5 +1,4 @@
-/*
- *
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -16,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.regionserver.querymatcher;
 
 import static org.junit.Assert.assertEquals;
@@ -26,18 +24,23 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.TreeSet;
-
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.KeyValue;
 import 
org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category({ RegionServerTests.class, SmallTests.class })
 public class TestExplicitColumnTracker {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestExplicitColumnTracker.class);
+
   private final byte[] col1 = Bytes.toBytes("col1");
   private final byte[] col2 = Bytes.toBytes("col2");
   private final byte[] col3 = Bytes.toBytes("col3");

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestNewVersionBehaviorTracker.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestNewVersionBehaviorTracker.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestNewVersionBehaviorTracker.java
index 81a14a7..098c5ff 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestNewVersionBehaviorTracker.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestNewVersionBehaviorTracker.java
@@ -1,5 +1,4 @@
-/*
- *
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,24 +17,29 @@
  */
 package org.apache.hadoop.hbase.regionserver.querymatcher;
 
-import java.io.IOException;
+import static org.junit.Assert.assertEquals;
 
+import java.io.IOException;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellComparatorImpl;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.KeyValue;
 import 
org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker.DeleteResult;
 import 
org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import static org.junit.Assert.assertEquals;
-
 @Category({ RegionServerTests.class, SmallTests.class })
 public class TestNewVersionBehaviorTracker {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestNewVersionBehaviorTracker.class);
+
   private final byte[] col1 = Bytes.toBytes("col1");
   private final byte[] col2 = Bytes.toBytes("col2");
   private final byte[] row = Bytes.toBytes("row");

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestScanDeleteTracker.java
index 9723432..7cfc323 100644
--- 

[07/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
index 4ae92a4..e53ed36 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMinorCompaction.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -25,6 +24,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
@@ -39,6 +39,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -51,11 +52,16 @@ import org.slf4j.LoggerFactory;
  */
 @Category({RegionServerTests.class, MediumTests.class})
 public class TestMinorCompaction {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestMinorCompaction.class);
+
   @Rule public TestName name = new TestName();
   private static final Logger LOG = 
LoggerFactory.getLogger(TestMinorCompaction.class.getName());
   private static final HBaseTestingUtility UTIL = 
HBaseTestingUtility.createLocalHTU();
   protected Configuration conf = UTIL.getConfiguration();
-  
+
   private HRegion r = null;
   private HTableDescriptor htd = null;
   private int compactionThreshold;

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
index c6a300f..814c86f 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreCompaction.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -32,7 +31,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -40,6 +38,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -67,6 +66,7 @@ import org.apache.hadoop.hbase.util.Pair;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -79,6 +79,11 @@ import org.slf4j.LoggerFactory;
  */
 @Category(MediumTests.class)
 public class TestMobStoreCompaction {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestMobStoreCompaction.class);
+
   @Rule
   public TestName name = new TestName();
   static final Logger LOG = 
LoggerFactory.getLogger(TestMobStoreCompaction.class.getName());

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreScanner.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreScanner.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreScanner.java
index b221106..0135350 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreScanner.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMobStoreScanner.java
@@ -1,5 

[22/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java
--
diff --git 
a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java
 
b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java
index e4766f6..ffc6ab8 100644
--- 
a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java
+++ 
b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/TestProcedureStoreTracker.java
@@ -15,26 +15,30 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.procedure2.store;
 
-import java.util.Random;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
-import org.apache.hadoop.hbase.testclassification.SmallTests;
+import java.util.Random;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import 
org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker.BitSetNode;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
-
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static 
org.apache.hadoop.hbase.procedure2.store.ProcedureStoreTracker.BitSetNode;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-@Category({MasterTests.class, SmallTests.class})
+@Category({MasterTests.class, MediumTests.class})
 public class TestProcedureStoreTracker {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestProcedureStoreTracker.class);
+
   private static final Logger LOG = 
LoggerFactory.getLogger(TestProcedureStoreTracker.class);
 
   @Test

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java
--
diff --git 
a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java
 
b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java
index 31c9cf3..443386d 100644
--- 
a/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java
+++ 
b/hbase-procedure/src/test/java/org/apache/hadoop/hbase/procedure2/store/wal/TestStressWALProcedureStore.java
@@ -15,16 +15,20 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.procedure2.store.wal;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import java.io.IOException;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicLong;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
 import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.LoadCounter;
@@ -32,22 +36,22 @@ import 
org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.TestProcedure;
 import org.apache.hadoop.hbase.procedure2.util.StringUtils;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.testclassification.MasterTests;
-
 import org.junit.After;
 import org.junit.Before;
-import org.junit.Test;
+import org.junit.ClassRule;
 import org.junit.Ignore;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 @Category({MasterTests.class, LargeTests.class})
 public class TestStressWALProcedureStore {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestStressWALProcedureStore.class);
+
   private static final Logger LOG = 
LoggerFactory.getLogger(TestWALProcedureStore.class);
 
   private static final int PROCEDURE_STORE_SLOTS = 8;
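
The first hunk in this message also promotes TestProcedureStoreTracker from SmallTests to MediumTests. Categories are plain marker types that runners and build profiles filter on, which is what makes such a promotion meaningful. A self-contained sketch of that filtering, with all names invented:

    import org.junit.Test;
    import org.junit.experimental.categories.Categories;
    import org.junit.experimental.categories.Categories.IncludeCategory;
    import org.junit.experimental.categories.Category;
    import org.junit.runner.RunWith;
    import org.junit.runners.Suite.SuiteClasses;

    public class CategoryFilteringExample {

      // Marker type standing in for testclassification.MediumTests.
      public interface MediumMarker {
      }

      @Category(MediumMarker.class)
      public static class SampleMediumTest {
        @Test
        public void runs() {
        }
      }

      // Runs only the suite members tagged with the included category.
      @RunWith(Categories.class)
      @IncludeCategory(MediumMarker.class)
      @SuiteClasses(SampleMediumTest.class)
      public static class MediumOnlySuite {
      }
    }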


[20/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
index c5e6464..c418b5f 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestStochasticBalancerJmxMetrics.java
@@ -15,7 +15,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase;
 
 import static org.junit.Assert.assertTrue;
@@ -46,6 +45,7 @@ import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.net.DNSToSwitchMapping;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.FixMethodOrder;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -58,6 +58,11 @@ import org.slf4j.LoggerFactory;
 @FixMethodOrder(MethodSorters.NAME_ASCENDING)
 @Ignore
 public class TestStochasticBalancerJmxMetrics extends BalancerTestBase {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestStochasticBalancerJmxMetrics.class);
+
   private static final Logger LOG = 
LoggerFactory.getLogger(TestStochasticBalancerJmxMetrics.class);
   private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
   private static int connectorPort = 61120;

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/TestTagRewriteCell.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestTagRewriteCell.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestTagRewriteCell.java
index 686c934..dc47661 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestTagRewriteCell.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestTagRewriteCell.java
@@ -1,10 +1,11 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  *
  * http://www.apache.org/licenses/LICENSE-2.0
  *
@@ -21,12 +22,17 @@ import static org.junit.Assert.assertTrue;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category(SmallTests.class)
 public class TestTagRewriteCell {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestTagRewriteCell.class);
+
   @Test
   public void testHeapSize() {
 Cell originalCell = CellUtil.createCell(Bytes.toBytes("row"), 
Bytes.toBytes("value"));

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
index 0bdcb3e..c1735a7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestZooKeeper.java
@@ -1,5 +1,4 @@
 /**
- *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -49,6 +48,7 @@ import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -58,6 +58,11 @@ import org.slf4j.LoggerFactory;
 
 @Category({MiscTests.class, LargeTests.class})
 public class TestZooKeeper {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestZooKeeper.class);

[17/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithTwoDifferentZKClusters.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithTwoDifferentZKClusters.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithTwoDifferentZKClusters.java
index f2a5221..c04ee4b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithTwoDifferentZKClusters.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/replication/TestReplicationAdminWithTwoDifferentZKClusters.java
@@ -1,19 +1,26 @@
 /**
- * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license
- * agreements. See the NOTICE file distributed with this work for additional 
information regarding
- * copyright ownership. The ASF licenses this file to you under the Apache 
License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the 
License. You may obtain a
- * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless 
required by applicable
- * law or agreed to in writing, software distributed under the License is 
distributed on an "AS IS"
- * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 
implied. See the License
- * for the specific language governing permissions and limitations under the 
License.
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.client.replication;
 
 import static org.junit.Assert.assertTrue;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -26,12 +33,17 @@ import 
org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category({ MediumTests.class, ClientTests.class })
 public class TestReplicationAdminWithTwoDifferentZKClusters {
 
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  
HBaseClassTestRule.forClass(TestReplicationAdminWithTwoDifferentZKClusters.class);
+
   private static Configuration conf1 = HBaseConfiguration.create();
   private static Configuration conf2;
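
The fixture above keeps two configurations, one per ZooKeeper cluster. A sketch of how such a pair can be built; the host names and ports are illustrative, while the configuration keys are the standard HBase client keys:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class TwoClusterConfigExample {
      public static void main(String[] args) {
        // First cluster's ZooKeeper ensemble.
        Configuration conf1 = HBaseConfiguration.create();
        conf1.set("hbase.zookeeper.quorum", "zk-a.example.com");
        conf1.setInt("hbase.zookeeper.property.clientPort", 2181);

        // Second configuration starts from the first but points at a
        // different ensemble, so it addresses a distinct cluster.
        Configuration conf2 = HBaseConfiguration.create(conf1);
        conf2.set("hbase.zookeeper.quorum", "zk-b.example.com");
        conf2.setInt("hbase.zookeeper.property.clientPort", 2182);

        System.out.println(conf1.get("hbase.zookeeper.quorum"));
        System.out.println(conf2.get("hbase.zookeeper.quorum"));
      }
    }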
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java
index 88a50ea..e4b2b2f 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/TestCellMessageCodec.java
@@ -26,22 +26,29 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import org.apache.hbase.thirdparty.com.google.common.io.CountingInputStream;
 import org.apache.hbase.thirdparty.com.google.common.io.CountingOutputStream;
 
 @Category({MiscTests.class, SmallTests.class})
 public class TestCellMessageCodec {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestCellMessageCodec.class);

[09/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java
index 9ecde78..01f8a2f 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestSuperUserQuotaPermissions.java
@@ -1,12 +1,13 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -25,9 +26,9 @@ import java.security.PrivilegedExceptionAction;
 import java.util.Map;
 import java.util.concurrent.Callable;
 import java.util.concurrent.atomic.AtomicLong;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
@@ -45,6 +46,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -57,6 +59,11 @@ import org.slf4j.LoggerFactory;
  */
 @Category(MediumTests.class)
 public class TestSuperUserQuotaPermissions {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  HBaseClassTestRule.forClass(TestSuperUserQuotaPermissions.class);
+
   private static final Logger LOG = 
LoggerFactory.getLogger(TestSuperUserQuotaPermissions.class);
   private static final HBaseTestingUtility TEST_UTIL = new 
HBaseTestingUtility();
   // Default to the user running the tests

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java
index 0e3f8ff..6803ee5 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestTableQuotaViolationStore.java
@@ -1,12 +1,13 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -28,7 +29,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicReference;
-
+import 

[03/30] hbase git commit: HBASE-19873 Add a CategoryBasedTimeout ClassRule for all UTs

2018-01-28 Thread zhangduo
http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.java
index a357c1f..1686ba3 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCoprocessorWhitelistMasterObserver.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -22,11 +22,10 @@ import static org.junit.Assert.fail;
 
 import java.io.IOException;
 import java.util.Optional;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -47,7 +46,6 @@ import org.junit.After;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TestRule;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -56,6 +54,11 @@ import org.slf4j.LoggerFactory;
  */
 @Category({SecurityTests.class, MediumTests.class})
 public class TestCoprocessorWhitelistMasterObserver extends SecureTestUtil {
+
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+  
HBaseClassTestRule.forClass(TestCoprocessorWhitelistMasterObserver.class);
+
   private static final Logger LOG =
   LoggerFactory.getLogger(TestCoprocessorWhitelistMasterObserver.class);
   private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
@@ -80,10 +83,6 @@ public class TestCoprocessorWhitelistMasterObserver extends 
SecureTestUtil {
 UTIL.shutdownMiniCluster();
   }
 
-  @ClassRule
-  public static TestRule timeout =
-  
CategoryBasedTimeout.forClass(TestCoprocessorWhitelistMasterObserver.class);
-
   /**
* Test a table modification adding a coprocessor path
* which is not whitelisted

http://git-wip-us.apache.org/repos/asf/hbase/blob/918599ef/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
index df06f7e..328024c 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestNamespaceCommands.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -21,12 +21,13 @@ import static org.apache.hadoop.hbase.AuthUtil.toGroupEntry;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import com.google.protobuf.BlockingRpcChannel;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
-
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -50,15 +51,21 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.JVMClusterUtil;
 import org.junit.AfterClass;
 import 

hbase-site git commit: INFRA-10751 Empty commit

2018-01-28 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site aa7ffc92b -> b58a219d1


INFRA-10751 Empty commit


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/b58a219d
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/b58a219d
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/b58a219d

Branch: refs/heads/asf-site
Commit: b58a219d187d3b2dfe501128fb09f29ac41eb330
Parents: aa7ffc9
Author: jenkins 
Authored: Sun Jan 28 15:13:08 2018 +
Committer: jenkins 
Committed: Sun Jan 28 15:13:08 2018 +

--

--




[08/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
index add44d1..efa6d95 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.html
@@ -43,13 +43,13 @@
[Generated source view of TableBackupClient.java; the HTML markup was
stripped in this archive, so the diff is summarized. Substantive change:
import reordering, with org.apache.hadoop.hbase.client.Admin,
org.apache.hadoop.hbase.client.Connection,
org.apache.hadoop.hbase.util.EnvironmentEdgeManager and
org.apache.hadoop.hbase.util.FSUtils now sorted ahead of the
org.apache.yetus.audience.InterfaceAudience and org.slf4j imports. The
remaining hunks re-render init(), beginBackup(), getMessage(),
deleteSnapshots() and cleanupExportSnapshotLog() without visible content
change: beginBackup(), for instance, still snapshots the backup system
table, records the start timestamp, sets the session state to RUNNING and
its phase to REQUEST, and persists the updated BackupInfo. The captured
portion of this message ends mid-hunk inside cleanupExportSnapshotLog().]

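The TableBackupClient view summarized above centers on session bookkeeping: record a start timestamp, flip the session to RUNNING, persist the info. A minimal sketch of that pattern with simplified stand-in types, not the real HBase classes:

    // Simplified stand-in for BackupInfo/BackupManager; the real classes
    // also persist this state to the backup system table.
    class BackupSessionSketch {

      enum State { RUNNING, COMPLETE, FAILED }

      private long startTs;
      private State state;

      void begin() {
        startTs = System.currentTimeMillis();
        state = State.RUNNING;
        System.out.println("Backup started at " + startTs);
      }

      void complete() {
        state = State.COMPLETE;
      }
    }
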
[41/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html 
b/devapidocs/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
index fb75405..138369a 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
[Generated-javadoc diff for FullTableBackupClient; link markup was
stripped in this archive. The summary for execute() gains a trailing
period ("Backup request execution."), and its Throws entry now reads
"IOException - if the execution of the backup fails"; the
snapshotTable(Admin admin, TableName tableName, String snapshotName)
signature block is re-rendered unchanged.]
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html 
b/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
index c7279d7..a30f769 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
[Generated-javadoc diff for IncrementalBackupManager; link markup was
stripped. The summary for getIncrBackupLogFileList() gains a trailing
period ("Get list of WAL files eligible for incremental backup."), its
Returns entry ("list of WAL files") is unchanged, and its Throws entry now
reads "IOException - if getting the list of WAL files fails"; the private
excludeAlreadyBackedUpWALs(List<String> logList, ...) signature is
re-rendered unchanged.]

[37/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html
index af3aeef..ce38532 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.BackupPhase.html
@@ -40,14 +40,14 @@
 032import 
org.apache.commons.lang3.StringUtils;
 033import 
org.apache.hadoop.hbase.TableName;
 034import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-035import 
org.apache.yetus.audience.InterfaceAudience;
-036import org.slf4j.Logger;
-037import org.slf4j.LoggerFactory;
-038import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-039import 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
-040import 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupInfo.Builder;
-041import 
org.apache.hadoop.hbase.util.Bytes;
-042
+035import 
org.apache.hadoop.hbase.util.Bytes;
+036import 
org.apache.yetus.audience.InterfaceAudience;
+037import org.slf4j.Logger;
+038import org.slf4j.LoggerFactory;
+039
+040import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+041import 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
+042import 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupInfo.Builder;
 043
 044/**
 045 * An object to encapsulate the 
information for each backup session
@@ -56,506 +56,503 @@
 048public class BackupInfo implements 
ComparableBackupInfo {
 049  private static final Logger LOG = 
LoggerFactory.getLogger(BackupInfo.class);
 050
-051  public static interface Filter {
-052
-053/**
-054 * Filter interface
-055 * @param info backup info
-056 * @return true if info passes 
filter, false otherwise
-057 */
-058public boolean apply(BackupInfo 
info);
-059  }
-060
-061  /**
-062   * Backup session states
-063   */
-064  public static enum BackupState {
-065RUNNING, COMPLETE, FAILED, ANY
-066  }
-067
-068  /**
-069   * BackupPhase - phases of an ACTIVE 
backup session (running), when state of a backup session is
-070   * BackupState.RUNNING
-071   */
-072  public static enum BackupPhase {
-073REQUEST, SNAPSHOT, 
PREPARE_INCREMENTAL, SNAPSHOTCOPY, INCREMENTAL_COPY, STORE_MANIFEST
-074  }
-075
-076  /**
-077   * Backup id
-078   */
-079  private String backupId;
-080
-081  /**
-082   * Backup type, full or incremental
-083   */
-084  private BackupType type;
-085
-086  /**
-087   * Target root directory for storing 
the backup files
-088   */
-089  private String backupRootDir;
-090
-091  /**
-092   * Backup state
-093   */
-094  private BackupState state;
-095
-096  /**
-097   * Backup phase
-098   */
-099  private BackupPhase phase = 
BackupPhase.REQUEST;
-100
-101  /**
-102   * Backup failure message
-103   */
-104  private String failedMsg;
-105
-106  /**
-107   * Backup status map for all tables
-108   */
-109  private MapTableName, 
BackupTableInfo backupTableInfoMap;
-110
-111  /**
-112   * Actual start timestamp of a backup 
process
-113   */
-114  private long startTs;
-115
-116  /**
-117   * Actual end timestamp of the backup 
process
-118   */
-119  private long completeTs;
-120
-121  /**
-122   * Total bytes of incremental logs 
copied
-123   */
-124  private long totalBytesCopied;
-125
-126  /**
-127   * For incremental backup, a location 
of a backed-up hlogs
-128   */
-129  private String hlogTargetDir = null;
-130
-131  /**
-132   * Incremental backup file list
-133   */
-134  private ListString 
incrBackupFileList;
-135
-136  /**
-137   * New region server log timestamps for 
table set after distributed log roll key - table name,
-138   * value - map of RegionServer hostname 
- last log rolled timestamp
-139   */
-140  private HashMapTableName, 
HashMapString, Long tableSetTimestampMap;
-141
-142  /**
-143   * Backup progress in %% (0-100)
-144   */
-145  private int progress;
-146
-147  /**
-148   * Number of parallel workers. -1 - 
system defined
-149   */
-150  private int workers = -1;
-151
-152  /**
-153   * Bandwidth per worker in MB per sec. 
-1 - unlimited
-154   */
-155  private long bandwidth = -1;
-156
-157  public BackupInfo() {
-158backupTableInfoMap = new HashMap<TableName, BackupTableInfo>();
-159  }
-160
-161  public BackupInfo(String backupId, 
BackupType type, TableName[] tables, String targetRootDir) {
-162this();
-163this.backupId = backupId;
-164this.type = type;
-165this.backupRootDir = targetRootDir;
-166this.addTables(tables);
-167if (type == BackupType.INCREMENTAL) 
{
-168  
setHLogTargetDir(BackupUtils.getLogBackupDir(targetRootDir, backupId));
-169}
-170this.startTs = 0;
-171this.completeTs = 0;
-172  }

[34/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.html
index af3aeef..ce38532 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.html
@@ -40,14 +40,14 @@
 032import 
org.apache.commons.lang3.StringUtils;
 033import 
org.apache.hadoop.hbase.TableName;
 034import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-035import 
org.apache.yetus.audience.InterfaceAudience;
-036import org.slf4j.Logger;
-037import org.slf4j.LoggerFactory;
-038import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-039import 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
-040import 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupInfo.Builder;
-041import 
org.apache.hadoop.hbase.util.Bytes;
-042
+035import 
org.apache.hadoop.hbase.util.Bytes;
+036import 
org.apache.yetus.audience.InterfaceAudience;
+037import org.slf4j.Logger;
+038import org.slf4j.LoggerFactory;
+039
+040import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+041import 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
+042import 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupInfo.Builder;
 043
 044/**
 045 * An object to encapsulate the 
information for each backup session
@@ -56,506 +56,503 @@
 048public class BackupInfo implements Comparable<BackupInfo> {
 049  private static final Logger LOG = 
LoggerFactory.getLogger(BackupInfo.class);
 050
-051  public static interface Filter {
-052
-053/**
-054 * Filter interface
-055 * @param info backup info
-056 * @return true if info passes 
filter, false otherwise
-057 */
-058public boolean apply(BackupInfo 
info);
-059  }
-060
-061  /**
-062   * Backup session states
-063   */
-064  public static enum BackupState {
-065RUNNING, COMPLETE, FAILED, ANY
-066  }
-067
-068  /**
-069   * BackupPhase - phases of an ACTIVE 
backup session (running), when state of a backup session is
-070   * BackupState.RUNNING
-071   */
-072  public static enum BackupPhase {
-073REQUEST, SNAPSHOT, 
PREPARE_INCREMENTAL, SNAPSHOTCOPY, INCREMENTAL_COPY, STORE_MANIFEST
-074  }
-075
-076  /**
-077   * Backup id
-078   */
-079  private String backupId;
-080
-081  /**
-082   * Backup type, full or incremental
-083   */
-084  private BackupType type;
-085
-086  /**
-087   * Target root directory for storing 
the backup files
-088   */
-089  private String backupRootDir;
-090
-091  /**
-092   * Backup state
-093   */
-094  private BackupState state;
-095
-096  /**
-097   * Backup phase
-098   */
-099  private BackupPhase phase = 
BackupPhase.REQUEST;
-100
-101  /**
-102   * Backup failure message
-103   */
-104  private String failedMsg;
-105
-106  /**
-107   * Backup status map for all tables
-108   */
-109  private Map<TableName, BackupTableInfo> backupTableInfoMap;
-110
-111  /**
-112   * Actual start timestamp of a backup 
process
-113   */
-114  private long startTs;
-115
-116  /**
-117   * Actual end timestamp of the backup 
process
-118   */
-119  private long completeTs;
-120
-121  /**
-122   * Total bytes of incremental logs 
copied
-123   */
-124  private long totalBytesCopied;
-125
-126  /**
-127   * For incremental backup, a location 
of a backed-up hlogs
-128   */
-129  private String hlogTargetDir = null;
-130
-131  /**
-132   * Incremental backup file list
-133   */
-134  private List<String> incrBackupFileList;
-135
-136  /**
-137   * New region server log timestamps for 
table set after distributed log roll key - table name,
-138   * value - map of RegionServer hostname -> last log rolled timestamp
-139   */
-140  private HashMap<TableName, HashMap<String, Long>> tableSetTimestampMap;
-141
-142  /**
-143   * Backup progress in %% (0-100)
-144   */
-145  private int progress;
-146
-147  /**
-148   * Number of parallel workers. -1 - 
system defined
-149   */
-150  private int workers = -1;
-151
-152  /**
-153   * Bandwidth per worker in MB per sec. 
-1 - unlimited
-154   */
-155  private long bandwidth = -1;
-156
-157  public BackupInfo() {
-158backupTableInfoMap = new HashMap<TableName, BackupTableInfo>();
-159  }
-160
-161  public BackupInfo(String backupId, 
BackupType type, TableName[] tables, String targetRootDir) {
-162this();
-163this.backupId = backupId;
-164this.type = type;
-165this.backupRootDir = targetRootDir;
-166this.addTables(tables);
-167if (type == BackupType.INCREMENTAL) 
{
-168  
setHLogTargetDir(BackupUtils.getLogBackupDir(targetRootDir, backupId));
-169}
-170this.startTs = 0;
-171this.completeTs = 0;
-172  }
-173
-174  public int getWorkers() {
-175return workers;
-176  
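
For orientation, the constructor shown above only derives an HLog target
directory when the type is INCREMENTAL. A minimal sketch of constructing one
(the id, tables and root path here are hypothetical, not from this commit):

    // Hypothetical values; BackupInfo, BackupType and TableName as in the diff.
    TableName[] tables = { TableName.valueOf("t1"), TableName.valueOf("t2") };
    BackupInfo info =
        new BackupInfo("backup_1", BackupType.INCREMENTAL, tables, "hdfs://nn/backup");
    // For INCREMENTAL, the constructor also sets the HLog target dir via
    // BackupUtils.getLogBackupDir("hdfs://nn/backup", "backup_1").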

[32/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html
index 658bfab..7279ea9 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/RestoreDriver.html
@@ -55,16 +55,16 @@
 047import 
org.apache.hadoop.hbase.backup.impl.BackupManager;
 048import 
org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
 049import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-050import 
org.apache.yetus.audience.InterfaceAudience;
-051import org.slf4j.Logger;
-052import org.slf4j.LoggerFactory;
-053import 
org.apache.hadoop.hbase.client.Connection;
-054import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-055import 
org.apache.hadoop.hbase.util.AbstractHBaseTool;
-056import 
org.apache.hadoop.hbase.util.FSUtils;
-057import 
org.apache.hadoop.util.ToolRunner;
-058import org.apache.log4j.Level;
-059import org.apache.log4j.LogManager;
+050import 
org.apache.hadoop.hbase.client.Connection;
+051import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+052import 
org.apache.hadoop.hbase.util.AbstractHBaseTool;
+053import 
org.apache.hadoop.hbase.util.FSUtils;
+054import 
org.apache.hadoop.util.ToolRunner;
+055import org.apache.log4j.Level;
+056import org.apache.log4j.LogManager;
+057import 
org.apache.yetus.audience.InterfaceAudience;
+058import org.slf4j.Logger;
+059import org.slf4j.LoggerFactory;
 060
 061/**
 062 *
@@ -73,213 +73,215 @@
 065 */
 066@InterfaceAudience.Private
 067public class RestoreDriver extends 
AbstractHBaseTool {
-068
-069  private static final Logger LOG = 
LoggerFactory.getLogger(RestoreDriver.class);
-070  private CommandLine cmd;
-071
-072  private static final String 
USAGE_STRING =
-073  "Usage: hbase restore 
backup_path backup_id [options]\n"
-074  + "  backup_path Path to a 
backup destination root\n"
-075  + "  backup_id   Backup 
image ID to restore\n"
-076  + "  table(s)
Comma-separated list of tables to restore\n";
-077
-078  private static final String 
USAGE_FOOTER = "";
-079
-080  protected RestoreDriver() throws 
IOException {
-081init();
-082  }
-083
-084  protected void init() throws 
IOException {
-085// disable irrelevant loggers to avoid them messing up the command output
-086
LogUtils.disableZkAndClientLoggers();
-087  }
-088
-089  private int parseAndRun(String[] args) 
throws IOException {
-090// Check if backup is enabled
-091if 
(!BackupManager.isBackupEnabled(getConf())) {
-092  
System.err.println(BackupRestoreConstants.ENABLE_BACKUP);
-093  return -1;
-094}
-095
-096
System.out.println(BackupRestoreConstants.VERIFY_BACKUP);
-097
-098// enable debug logging
-099if (cmd.hasOption(OPTION_DEBUG)) {
-100  
LogManager.getLogger("org.apache.hadoop.hbase.backup").setLevel(Level.DEBUG);
-101}
-102
-103// whether to overwrite to existing 
table if any, false by default
-104boolean overwrite = 
cmd.hasOption(OPTION_OVERWRITE);
-105if (overwrite) {
-106  LOG.debug("Found -overwrite option 
in restore command, "
-107  + "will overwrite to existing 
table if any in the restore target");
-108}
-109
-110// whether to only check the 
dependencies, false by default
-111boolean check = 
cmd.hasOption(OPTION_CHECK);
-112if (check) {
-113  LOG.debug("Found -check option in 
restore command, "
-114  + "will check and verify the 
dependencies");
-115}
-116
-117if (cmd.hasOption(OPTION_SET) && cmd.hasOption(OPTION_TABLE)) {
-118  System.err.println("Options -s and 
-t are mutaully exclusive,"+
-119  " you can not specify both of 
them.");
-120  printToolUsage();
-121  return -1;
-122}
-123
-124if (!cmd.hasOption(OPTION_SET) && !cmd.hasOption(OPTION_TABLE)) {
-125  System.err.println("You have to 
specify either set name or table list to restore");
-126  printToolUsage();
-127  return -1;
-128}
-129
-130if 
(cmd.hasOption(OPTION_YARN_QUEUE_NAME)) {
-131  String queueName = 
cmd.getOptionValue(OPTION_YARN_QUEUE_NAME);
-132  // Set system property value for MR 
job
-133  
System.setProperty("mapreduce.job.queuename", queueName);
-134}
-135
-136// parse main restore command 
options
-137String[] remainArgs = 
cmd.getArgs();
-138if (remainArgs.length != 2) {
-139  printToolUsage();
-140  return -1;
-141}
-142
-143String backupRootDir = 
remainArgs[0];
-144String backupId = remainArgs[1];
-145String tables = null;
-146String tableMapping =
-147
cmd.hasOption(OPTION_TABLE_MAPPING) ? cmd.getOptionValue(OPTION_TABLE_MAPPING) 
: null;
-148try (final 
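
Given the checks above, a valid invocation supplies exactly two positional
arguments (backup root and backup id) and exactly one of -s or -t. A
hypothetical argument vector that would get past parseAndRun (values made up):

    // Hypothetical args; -t supplies the table list, -o enables overwrite.
    String[] args = { "hdfs://nn/backup", "backup_1516900000000",
        "-t", "table1,table2", "-o" };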

[33/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupMergeJob.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupMergeJob.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupMergeJob.html
index 00b2c53..c96af49 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupMergeJob.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupMergeJob.html
@@ -38,11 +38,11 @@
 030
 031@InterfaceAudience.Private
 032public interface BackupMergeJob extends 
Configurable {
-033
-034  /**
-035   * Run backup merge operation
+033  /**
+034   * Run backup merge operation.
+035   *
 036   * @param backupIds backup image ids
-037   * @throws IOException
+037   * @throws IOException if the backup 
merge operation fails
 038   */
 039  void run(String[] backupIds) throws 
IOException;
 040}
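
BackupMergeJob is now a bare Configurable interface with this single method, so
an implementation only receives the ids of the images to merge. An illustrative
skeleton, not part of this commit (a real job would rewrite the merged images):

    // Illustrative only; assumes org.apache.hadoop.conf.Configuration and
    // java.io.IOException are imported. Validates input, performs no merge.
    public class NoOpMergeJob implements BackupMergeJob {
      private Configuration conf;
      @Override public void setConf(Configuration conf) { this.conf = conf; }
      @Override public Configuration getConf() { return conf; }
      @Override public void run(String[] backupIds) throws IOException {
        if (backupIds == null || backupIds.length < 2) {
          throw new IOException("merge needs at least two backup ids");
        }
      }
    }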

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
index 7e1a37b..e68377f 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
@@ -37,105 +37,99 @@
 029  /*
 030   * Backup/Restore constants
 031   */
-032  public final static String 
BACKUP_SYSTEM_TABLE_NAME_KEY = "hbase.backup.system.table.name";
-033  public final static String 
BACKUP_SYSTEM_TABLE_NAME_DEFAULT = "backup:system";
+032  String BACKUP_SYSTEM_TABLE_NAME_KEY = 
"hbase.backup.system.table.name";
+033  String BACKUP_SYSTEM_TABLE_NAME_DEFAULT 
= "backup:system";
 034
-035  public final static String 
BACKUP_SYSTEM_TTL_KEY = "hbase.backup.system.ttl";
+035  String BACKUP_SYSTEM_TTL_KEY = 
"hbase.backup.system.ttl";
 036
-037  public final static int 
BACKUP_SYSTEM_TTL_DEFAULT = HConstants.FOREVER;
-038  public final static String 
BACKUP_ENABLE_KEY = "hbase.backup.enable";
-039  public final static boolean 
BACKUP_ENABLE_DEFAULT = false;
+037  int BACKUP_SYSTEM_TTL_DEFAULT = 
HConstants.FOREVER;
+038  String BACKUP_ENABLE_KEY = 
"hbase.backup.enable";
+039  boolean BACKUP_ENABLE_DEFAULT = 
false;
 040
-041
-042  public static final String 
BACKUP_MAX_ATTEMPTS_KEY = "hbase.backup.attempts.max";
-043  public static final int 
DEFAULT_BACKUP_MAX_ATTEMPTS = 10;
-044
-045  public static final String 
BACKUP_ATTEMPTS_PAUSE_MS_KEY = "hbase.backup.attempts.pause.ms";
-046  public static final int 
DEFAULT_BACKUP_ATTEMPTS_PAUSE_MS = 1;
-047
-048  /*
-049   *  Drivers option list
-050   */
-051  public static final String 
OPTION_OVERWRITE = "o";
-052  public static final String 
OPTION_OVERWRITE_DESC =
-053  "Overwrite data if any of the 
restore target tables exists";
-054
-055  public static final String OPTION_CHECK 
= "c";
-056  public static final String 
OPTION_CHECK_DESC =
-057  "Check restore sequence and 
dependencies only (does not execute the command)";
-058
-059  public static final String OPTION_SET = 
"s";
-060  public static final String 
OPTION_SET_DESC = "Backup set name";
-061  public static final String 
OPTION_SET_RESTORE_DESC =
-062  "Backup set to restore, mutually 
exclusive with -t (table list)";
-063  public static final String 
OPTION_SET_BACKUP_DESC =
-064  "Backup set to backup, mutually 
exclusive with -t (table list)";
-065  public static final String OPTION_DEBUG 
= "d";
-066  public static final String 
OPTION_DEBUG_DESC = "Enable debug loggings";
+041  String BACKUP_MAX_ATTEMPTS_KEY = 
"hbase.backup.attempts.max";
+042  int DEFAULT_BACKUP_MAX_ATTEMPTS = 10;
+043
+044  String BACKUP_ATTEMPTS_PAUSE_MS_KEY = 
"hbase.backup.attempts.pause.ms";
+045  int DEFAULT_BACKUP_ATTEMPTS_PAUSE_MS = 
1;
+046
+047  /*
+048   *  Drivers option list
+049   */
+050  String OPTION_OVERWRITE = "o";
+051  String OPTION_OVERWRITE_DESC = 
"Overwrite data if any of the restore target tables exists";
+052
+053  String OPTION_CHECK = "c";
+054  String OPTION_CHECK_DESC =
+055  "Check restore sequence and 
dependencies only (does not execute the command)";
+056
+057  String OPTION_SET = "s";
+058  String OPTION_SET_DESC = "Backup set 
name";
+059  String OPTION_SET_RESTORE_DESC = 
"Backup set to restore, mutually exclusive with -t (table list)";
+060  String OPTION_SET_BACKUP_DESC = "Backup 
set to backup, mutually exclusive with -t (table list)";
+061  String OPTION_DEBUG = "d";
+062  String OPTION_DEBUG_DESC = "Enable 
debug loggings";
+063
+064  String OPTION_TABLE = "t";
+065  String OPTION_TABLE_DESC = "Table name. 
If 
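
The dropped modifiers above are purely cosmetic: fields declared in a Java
interface are implicitly public, static and final, so the shorter form compiles
to exactly the same constants. A standalone illustration:

    // In an interface these two declarations are semantically identical.
    interface Example {
      public static final String A = "a"; // redundant modifiers
      String B = "b";                     // idiomatic equivalent
    }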

[25/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
index eb9e252..667152a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HelpCommand.html
@@ -28,22 +28,22 @@
 020
 021import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
 022import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
-029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
-033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
+031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
+035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
 039
 040import java.io.IOException;
 041import java.net.URI;
@@ -70,194 +70,194 @@
 062import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 063import 
org.apache.hadoop.hbase.client.Connection;
 064import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-065import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-066import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-067import 
org.apache.yetus.audience.InterfaceAudience;
-068
-069/**
-070 * General backup commands, options and 
usage messages
-071 */
-072
+065import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+066import 
org.apache.yetus.audience.InterfaceAudience;
+067
+068import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+069
+070/**
+071 * General backup commands, options and 
usage messages
+072 */
 073@InterfaceAudience.Private
 074public final class BackupCommands {
-075
-076  public final static String 
INCORRECT_USAGE = "Incorrect usage";
-077
-078  public final static String 
TOP_LEVEL_NOT_ALLOWED =
-079  "Top level (root) folder is not 
allowed to be a backup destination";
-080
-081  public static final String USAGE = 
"Usage: hbase backup 

[21/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.RepairCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.RepairCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.RepairCommand.html
index eb9e252..667152a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.RepairCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.RepairCommand.html

[51/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
Published site at .


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/aa7ffc92
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/aa7ffc92
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/aa7ffc92

Branch: refs/heads/asf-site
Commit: aa7ffc92b6b1ff6f89116dd034c83fa5d477ec57
Parents: f9958bb
Author: jenkins 
Authored: Sun Jan 28 15:12:48 2018 +
Committer: jenkins 
Committed: Sun Jan 28 15:12:48 2018 +

--
 acid-semantics.html |4 +-
 apache_hbase_reference_guide.pdf|4 +-
 book.html   |2 +-
 bulk-loads.html |4 +-
 checkstyle-aggregate.html   | 3596 -
 checkstyle.rss  |  106 +-
 coc.html|4 +-
 cygwin.html |4 +-
 dependencies.html   |4 +-
 dependency-convergence.html |4 +-
 dependency-info.html|4 +-
 dependency-management.html  |4 +-
 devapidocs/constant-values.html |6 +-
 devapidocs/index-all.html   |   20 +-
 .../hbase/backup/BackupClientFactory.html   |   12 +-
 .../hadoop/hbase/backup/BackupCopyJob.html  |6 +-
 .../hadoop/hbase/backup/BackupHFileCleaner.html |   42 +-
 .../hbase/backup/BackupInfo.BackupPhase.html|   18 +-
 .../hbase/backup/BackupInfo.BackupState.html|   14 +-
 .../hadoop/hbase/backup/BackupInfo.Filter.html  |2 +-
 .../apache/hadoop/hbase/backup/BackupInfo.html  |  150 +-
 .../hadoop/hbase/backup/BackupMergeJob.html |6 +-
 .../BackupRestoreConstants.BackupCommand.html   |   44 +-
 .../hbase/backup/BackupRestoreConstants.html|   76 +-
 .../hbase/backup/BackupRestoreFactory.html  |   14 +-
 .../hadoop/hbase/backup/BackupTableInfo.html|   18 +-
 .../hadoop/hbase/backup/HBackupFileSystem.html  |6 +-
 .../apache/hadoop/hbase/backup/LogUtils.html|4 +-
 .../hadoop/hbase/backup/RestoreDriver.html  |   41 +-
 .../apache/hadoop/hbase/backup/RestoreJob.html  |4 +-
 .../hbase/backup/RestoreRequest.Builder.html|   20 +-
 .../hadoop/hbase/backup/RestoreRequest.html |   40 +-
 .../hbase/backup/impl/BackupAdminImpl.html  |   73 +-
 .../impl/BackupCommands.BackupSetCommand.html   |   32 +-
 .../backup/impl/BackupCommands.Command.html |   18 +-
 .../impl/BackupCommands.CreateCommand.html  |6 +-
 .../impl/BackupCommands.DeleteCommand.html  |   10 +-
 .../impl/BackupCommands.DescribeCommand.html|8 +-
 .../backup/impl/BackupCommands.HelpCommand.html |8 +-
 .../impl/BackupCommands.HistoryCommand.html |   21 +-
 .../impl/BackupCommands.MergeCommand.html   |   12 +-
 .../impl/BackupCommands.ProgressCommand.html|8 +-
 .../impl/BackupCommands.RepairCommand.html  |8 +-
 .../hbase/backup/impl/BackupCommands.html   |   34 +-
 .../hadoop/hbase/backup/impl/BackupManager.html |   72 +-
 .../BackupManifest.BackupImage.Builder.html |   20 +-
 .../backup/impl/BackupManifest.BackupImage.html |   30 +-
 .../hbase/backup/impl/BackupManifest.html   |   38 +-
 .../backup/impl/BackupSystemTable.WALItem.html  |   18 +-
 .../hbase/backup/impl/BackupSystemTable.html|  328 +-
 .../backup/impl/FullTableBackupClient.html  |   10 +-
 .../backup/impl/IncrementalBackupManager.html   |   20 +-
 .../impl/IncrementalTableBackupClient.html  |   24 +-
 .../hbase/backup/impl/RestoreTablesClient.html  |   32 +-
 .../backup/impl/TableBackupClient.Stage.html|   16 +-
 .../hbase/backup/impl/TableBackupClient.html|   47 +-
 .../backup/impl/class-use/BackupException.html  |   22 -
 .../class-use/BackupManifest.BackupImage.html   |2 +-
 .../class-use/BackupSystemTable.WALItem.html|2 +-
 .../impl/class-use/BackupSystemTable.html   |3 +-
 .../mapreduce/MapReduceBackupCopyJob.html   |2 +-
 .../mapreduce/MapReduceBackupMergeJob.html  |   42 +-
 .../backup/mapreduce/MapReduceRestoreJob.html   |   18 +-
 .../master/LogRollMasterProcedureManager.html   |   33 +-
 ...LogRollBackupSubprocedure.RSRollLogTask.html |6 +-
 .../regionserver/LogRollBackupSubprocedure.html |   13 +-
 ...cedureManager.BackupSubprocedureBuilder.html |6 +-
 .../LogRollRegionServerProcedureManager.html|   34 +-
 .../hadoop/hbase/backup/util/BackupUtils.html   |   83 +-
 .../hadoop/hbase/backup/util/RestoreTool.html   |   52 +-
 .../hadoop/hbase/class-use/TableName.html   |7 +-
 .../class-use/ForeignException.html |8 +-
 .../org/apache/hadoop/hbase/Version.html|6 +-
 .../hbase/backup/BackupClientFactory.html   |   

[09/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.Stage.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.Stage.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.Stage.html
index add44d1..efa6d95 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.Stage.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/TableBackupClient.Stage.html
@@ -43,13 +43,13 @@
 035import 
org.apache.hadoop.hbase.backup.BackupType;
 036import 
org.apache.hadoop.hbase.backup.HBackupFileSystem;
 037import 
org.apache.hadoop.hbase.backup.impl.BackupManifest.BackupImage;
-038import 
org.apache.yetus.audience.InterfaceAudience;
-039import org.slf4j.Logger;
-040import org.slf4j.LoggerFactory;
-041import 
org.apache.hadoop.hbase.client.Admin;
-042import 
org.apache.hadoop.hbase.client.Connection;
-043import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-044import 
org.apache.hadoop.hbase.util.FSUtils;
+038import 
org.apache.hadoop.hbase.client.Admin;
+039import 
org.apache.hadoop.hbase.client.Connection;
+040import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+041import 
org.apache.hadoop.hbase.util.FSUtils;
+042import 
org.apache.yetus.audience.InterfaceAudience;
+043import org.slf4j.Logger;
+044import org.slf4j.LoggerFactory;
 045
 046import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 047
@@ -88,360 +88,355 @@
 080  }
 081
 082  public void init(final Connection conn, 
final String backupId, BackupRequest request)
-083  throws IOException
-084  {
-085if (request.getBackupType() == 
BackupType.FULL) {
-086  backupManager = new 
BackupManager(conn, conn.getConfiguration());
-087} else {
-088  backupManager = new 
IncrementalBackupManager(conn, conn.getConfiguration());
-089}
-090this.backupId = backupId;
-091this.tableList = 
request.getTableList();
-092this.conn = conn;
-093this.conf = 
conn.getConfiguration();
-094this.fs = 
FSUtils.getCurrentFileSystem(conf);
-095backupInfo =
-096
backupManager.createBackupInfo(backupId, request.getBackupType(), tableList,
-097  request.getTargetRootDir(), 
request.getTotalTasks(), request.getBandwidth());
-098if (tableList == null || 
tableList.isEmpty()) {
-099  this.tableList = new ArrayList<>(backupInfo.getTables());
-100}
-101// Start new session
-102backupManager.startBackupSession();
-103  }
-104
-105  /**
-106   * Begin the overall backup.
-107   * @param backupInfo backup info
-108   * @throws IOException exception
-109   */
-110  protected void 
beginBackup(BackupManager backupManager, BackupInfo backupInfo)
-111  throws IOException {
-112
-113BackupSystemTable.snapshot(conn);
-114
backupManager.setBackupInfo(backupInfo);
-115// set the start timestamp of the 
overall backup
-116long startTs = 
EnvironmentEdgeManager.currentTime();
-117backupInfo.setStartTs(startTs);
-118// set overall backup status: 
ongoing
-119
backupInfo.setState(BackupState.RUNNING);
-120
backupInfo.setPhase(BackupPhase.REQUEST);
-121LOG.info("Backup " + 
backupInfo.getBackupId() + " started at " + startTs + ".");
-122
-123
backupManager.updateBackupInfo(backupInfo);
-124if (LOG.isDebugEnabled()) {
-125  LOG.debug("Backup session " + 
backupInfo.getBackupId() + " has been started.");
-126}
-127  }
-128
-129  protected String getMessage(Exception 
e) {
-130String msg = e.getMessage();
-131if (msg == null || msg.equals("")) 
{
-132  msg = e.getClass().getName();
-133}
-134return msg;
-135  }
-136
-137  /**
-138   * Delete HBase snapshot for backup.
-139   * @param backupInfo backup info
-140   * @throws Exception exception
-141   */
-142  protected static void 
deleteSnapshots(final Connection conn, BackupInfo backupInfo, Configuration 
conf)
-143  throws IOException {
-144LOG.debug("Trying to delete snapshot 
for full backup.");
-145for (String snapshotName : 
backupInfo.getSnapshotNames()) {
-146  if (snapshotName == null) {
-147continue;
-148  }
-149  LOG.debug("Trying to delete 
snapshot: " + snapshotName);
-150
-151  try (Admin admin = conn.getAdmin()) 
{
-152
admin.deleteSnapshot(snapshotName);
-153  }
-154  LOG.debug("Deleting the snapshot " 
+ snapshotName + " for backup " + backupInfo.getBackupId()
-155  + " succeeded.");
-156}
-157  }
-158
-159  /**
-160   * Clean up directories with prefix 
"exportSnapshot-", which are generated when exporting
-161   * snapshots.
-162   * @throws IOException exception
-163   */
-164  protected static void 
cleanupExportSnapshotLog(Configuration conf) throws IOException {
-165FileSystem fs = 
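
Note that beginBackup stamps the session with
EnvironmentEdgeManager.currentTime() instead of System.currentTimeMillis(),
which lets tests inject a deterministic clock. A small illustration (the
injected timestamp is arbitrary):

    // EnvironmentEdge declares a single currentTime() method, so a lambda works.
    EnvironmentEdgeManager.injectEdge(() -> 1517126400000L);
    long ts = EnvironmentEdgeManager.currentTime(); // 1517126400000
    EnvironmentEdgeManager.reset(); // back to the system clock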

[26/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
index eb9e252..667152a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DescribeCommand.html

[17/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.BackupImage.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.BackupImage.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.BackupImage.html
index 7509dcf..ec2aa41 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.BackupImage.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.BackupImage.html
@@ -64,152 +64,152 @@
 056 */
 057@InterfaceAudience.Private
 058public class BackupManifest {
-059
-060  private static final Logger LOG = 
LoggerFactory.getLogger(BackupManifest.class);
-061
-062  // manifest file name
-063  public static final String 
MANIFEST_FILE_NAME = ".backup.manifest";
-064
-065  /**
-066   * Backup image, the dependency graph 
is made up by series of backup images BackupImage contains
-067   * all the relevant information to 
restore the backup and is used during restore operation
-068   */
-069
-070  public static class BackupImage implements Comparable<BackupImage> {
+059  private static final Logger LOG = 
LoggerFactory.getLogger(BackupManifest.class);
+060
+061  // manifest file name
+062  public static final String 
MANIFEST_FILE_NAME = ".backup.manifest";
+063
+064  /**
+065   * Backup image, the dependency graph 
is made up by series of backup images BackupImage contains
+066   * all the relevant information to 
restore the backup and is used during restore operation
+067   */
+068  public static class BackupImage implements Comparable<BackupImage> {
+069static class Builder {
+070  BackupImage image;
 071
-072static class Builder {
-073  BackupImage image;
-074
-075  Builder() {
-076image = new BackupImage();
-077  }
-078
-079  Builder withBackupId(String 
backupId) {
-080image.setBackupId(backupId);
-081return this;
-082  }
-083
-084  Builder withType(BackupType type) 
{
-085image.setType(type);
-086return this;
-087  }
-088
-089  Builder withRootDir(String rootDir) 
{
-090image.setRootDir(rootDir);
-091return this;
-092  }
-093
-094  Builder withTableList(List<TableName> tableList) {
-095image.setTableList(tableList);
-096return this;
-097  }
-098
-099  Builder withStartTime(long 
startTime) {
-100image.setStartTs(startTime);
-101return this;
-102  }
-103
-104  Builder withCompleteTime(long 
completeTime) {
-105
image.setCompleteTs(completeTime);
-106return this;
-107  }
-108
-109  BackupImage build() {
-110return image;
-111  }
-112
-113}
-114
-115private String backupId;
-116private BackupType type;
-117private String rootDir;
-118private List<TableName> tableList;
-119private long startTs;
-120private long completeTs;
-121private ArrayList<BackupImage> ancestors;
-122private HashMap<TableName, HashMap<String, Long>> incrTimeRanges;
-123
-124static Builder newBuilder() {
-125  return new Builder();
-126}
-127
-128public BackupImage() {
-129  super();
-130}
-131
-132private BackupImage(String backupId, 
BackupType type, String rootDir,
-133List<TableName> tableList, long startTs, long completeTs) {
-134  this.backupId = backupId;
-135  this.type = type;
-136  this.rootDir = rootDir;
-137  this.tableList = tableList;
-138  this.startTs = startTs;
-139  this.completeTs = completeTs;
-140}
-141
-142static BackupImage 
fromProto(BackupProtos.BackupImage im) {
-143  String backupId = 
im.getBackupId();
-144  String rootDir = 
im.getBackupRootDir();
-145  long startTs = im.getStartTs();
-146  long completeTs = 
im.getCompleteTs();
-147  List<HBaseProtos.TableName> tableListList = im.getTableListList();
-148  List<TableName> tableList = new ArrayList<TableName>();
-149  for (HBaseProtos.TableName tn : 
tableListList) {
-150
tableList.add(ProtobufUtil.toTableName(tn));
-151  }
-152
-153  List<BackupProtos.BackupImage> ancestorList = im.getAncestorsList();
-154
-155  BackupType type =
-156  im.getBackupType() == 
BackupProtos.BackupType.FULL ? BackupType.FULL
-157  : BackupType.INCREMENTAL;
-158
-159  BackupImage image = new 
BackupImage(backupId, type, rootDir, tableList, startTs, completeTs);
-160  for (BackupProtos.BackupImage img : 
ancestorList) {
-161
image.addAncestor(fromProto(img));
-162  }
-163  
image.setIncrTimeRanges(loadIncrementalTimestampMap(im));
-164  return image;
-165}
-166
-167BackupProtos.BackupImage toProto() 
{
-168  BackupProtos.BackupImage.Builder 
builder = BackupProtos.BackupImage.newBuilder();
-169  builder.setBackupId(backupId);
-170
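
For reference, the package-private Builder above is consumed fluently; a sketch
of how same-package code can assemble an image (all values illustrative):

    // Sketch only; tableList is a List<TableName> built elsewhere.
    BackupImage image = BackupImage.newBuilder()
        .withBackupId("backup_1")
        .withType(BackupType.FULL)
        .withRootDir("hdfs://nn/backup")
        .withTableList(tableList)
        .withStartTime(startTs)
        .withCompleteTime(completeTs)
        .build();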

[23/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.MergeCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.MergeCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.MergeCommand.html
index eb9e252..667152a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.MergeCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.MergeCommand.html

[28/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
index eb9e252..667152a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.CreateCommand.html

[36/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html
index af3aeef..ce38532 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupInfo.BackupState.html
@@ -40,14 +40,14 @@
 032import 
org.apache.commons.lang3.StringUtils;
 033import 
org.apache.hadoop.hbase.TableName;
 034import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-035import 
org.apache.yetus.audience.InterfaceAudience;
-036import org.slf4j.Logger;
-037import org.slf4j.LoggerFactory;
-038import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-039import 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
-040import 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupInfo.Builder;
-041import 
org.apache.hadoop.hbase.util.Bytes;
-042
+035import 
org.apache.hadoop.hbase.util.Bytes;
+036import 
org.apache.yetus.audience.InterfaceAudience;
+037import org.slf4j.Logger;
+038import org.slf4j.LoggerFactory;
+039
+040import 
org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+041import 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
+042import 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupInfo.Builder;
 043
 044/**
 045 * An object to encapsulate the 
information for each backup session
@@ -56,506 +56,503 @@
 048public class BackupInfo implements 
ComparableBackupInfo {
 049  private static final Logger LOG = 
LoggerFactory.getLogger(BackupInfo.class);
 050
-051  public static interface Filter {
-052
-053/**
-054 * Filter interface
-055 * @param info backup info
-056 * @return true if info passes 
filter, false otherwise
-057 */
-058public boolean apply(BackupInfo 
info);
-059  }
-060
-061  /**
-062   * Backup session states
-063   */
-064  public static enum BackupState {
-065RUNNING, COMPLETE, FAILED, ANY
-066  }
-067
-068  /**
-069   * BackupPhase - phases of an ACTIVE 
backup session (running), when state of a backup session is
-070   * BackupState.RUNNING
-071   */
-072  public static enum BackupPhase {
-073REQUEST, SNAPSHOT, 
PREPARE_INCREMENTAL, SNAPSHOTCOPY, INCREMENTAL_COPY, STORE_MANIFEST
-074  }
-075
-076  /**
-077   * Backup id
-078   */
-079  private String backupId;
-080
-081  /**
-082   * Backup type, full or incremental
-083   */
-084  private BackupType type;
-085
-086  /**
-087   * Target root directory for storing 
the backup files
-088   */
-089  private String backupRootDir;
-090
-091  /**
-092   * Backup state
-093   */
-094  private BackupState state;
-095
-096  /**
-097   * Backup phase
-098   */
-099  private BackupPhase phase = 
BackupPhase.REQUEST;
-100
-101  /**
-102   * Backup failure message
-103   */
-104  private String failedMsg;
-105
-106  /**
-107   * Backup status map for all tables
-108   */
-109  private Map<TableName, BackupTableInfo> backupTableInfoMap;
-110
-111  /**
-112   * Actual start timestamp of a backup 
process
-113   */
-114  private long startTs;
-115
-116  /**
-117   * Actual end timestamp of the backup 
process
-118   */
-119  private long completeTs;
-120
-121  /**
-122   * Total bytes of incremental logs 
copied
-123   */
-124  private long totalBytesCopied;
-125
-126  /**
-127   * For incremental backup, a location 
of a backed-up hlogs
-128   */
-129  private String hlogTargetDir = null;
-130
-131  /**
-132   * Incremental backup file list
-133   */
-134  private List<String> incrBackupFileList;
-135
-136  /**
-137   * New region server log timestamps for 
table set after distributed log roll key - table name,
-138   * value - map of RegionServer hostname 
- last log rolled timestamp
-139   */
-140  private HashMap<TableName, HashMap<String, Long>> tableSetTimestampMap;
-141
-142  /**
-143   * Backup progress in %% (0-100)
-144   */
-145  private int progress;
-146
-147  /**
-148   * Number of parallel workers. -1 - 
system defined
-149   */
-150  private int workers = -1;
-151
-152  /**
-153   * Bandwidth per worker in MB per sec. 
-1 - unlimited
-154   */
-155  private long bandwidth = -1;
-156
-157  public BackupInfo() {
-158backupTableInfoMap = new HashMap<TableName, BackupTableInfo>();
-159  }
-160
-161  public BackupInfo(String backupId, 
BackupType type, TableName[] tables, String targetRootDir) {
-162this();
-163this.backupId = backupId;
-164this.type = type;
-165this.backupRootDir = targetRootDir;
-166this.addTables(tables);
-167if (type == BackupType.INCREMENTAL) 
{
-168  
setHLogTargetDir(BackupUtils.getLogBackupDir(targetRootDir, backupId));
-169}
-170this.startTs = 0;
-171this.completeTs = 0;
-172  }
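
To make the constructor contract above concrete, here is a minimal usage sketch. It is not part of the patch, and the backup id, tables and root directory are invented values:

  // Hypothetical values throughout; illustration only, not part of the patch.
  TableName[] tables = { TableName.valueOf("t1"), TableName.valueOf("t2") };
  BackupInfo info = new BackupInfo("backup_1517126698", BackupType.INCREMENTAL,
      tables, "hdfs://namenode:8020/backup");
  // Per the constructor above: addTables(tables) fills the per-table status map,
  // an INCREMENTAL backup also derives its WAL target directory from
  // BackupUtils.getLogBackupDir(targetRootDir, backupId), and startTs/completeTs begin at 0.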

[03/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/util/RestoreTool.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/util/RestoreTool.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/util/RestoreTool.html
index 4277d0a..36dbc3c 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/util/RestoreTool.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/util/RestoreTool.html
@@ -44,36 +44,36 @@
 036import 
org.apache.hadoop.hbase.backup.BackupRestoreFactory;
 037import 
org.apache.hadoop.hbase.backup.HBackupFileSystem;
 038import 
org.apache.hadoop.hbase.backup.RestoreJob;
-039import 
org.apache.yetus.audience.InterfaceAudience;
-040import org.slf4j.Logger;
-041import org.slf4j.LoggerFactory;
-042import 
org.apache.hadoop.hbase.client.Admin;
-043import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-044import 
org.apache.hadoop.hbase.client.Connection;
-045import 
org.apache.hadoop.hbase.client.TableDescriptor;
-046import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
-047import 
org.apache.hadoop.hbase.io.HFileLink;
-048import 
org.apache.hadoop.hbase.io.hfile.HFile;
+039import 
org.apache.hadoop.hbase.client.Admin;
+040import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+041import 
org.apache.hadoop.hbase.client.Connection;
+042import 
org.apache.hadoop.hbase.client.TableDescriptor;
+043import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+044import 
org.apache.hadoop.hbase.io.HFileLink;
+045import 
org.apache.hadoop.hbase.io.hfile.HFile;
+046import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
+047import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
+048import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest;
 049import 
org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
-050import 
org.apache.hadoop.hbase.regionserver.StoreFileInfo;
-051import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
-052import 
org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
-053import 
org.apache.hadoop.hbase.snapshot.SnapshotManifest;
-054import 
org.apache.hadoop.hbase.util.Bytes;
-055import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-056import 
org.apache.hadoop.hbase.util.FSTableDescriptors;
-057
-058/**
-059 * A collection for methods used by 
multiple classes to restore HBase tables.
-060 */
-061@InterfaceAudience.Private
-062public class RestoreTool {
-063
+050import 
org.apache.hadoop.hbase.util.Bytes;
+051import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+052import 
org.apache.hadoop.hbase.util.FSTableDescriptors;
+053import 
org.apache.yetus.audience.InterfaceAudience;
+054import org.slf4j.Logger;
+055import org.slf4j.LoggerFactory;
+056
+057import 
org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
+058
+059/**
+060 * A collection for methods used by 
multiple classes to restore HBase tables.
+061 */
+062@InterfaceAudience.Private
+063public class RestoreTool {
 064  public static final Logger LOG = 
LoggerFactory.getLogger(BackupUtils.class);
 065  private final static long 
TABLE_AVAILABILITY_WAIT_TIME = 18;
 066
 067  private final String[] ignoreDirs = { 
HConstants.RECOVERED_EDITS_DIR };
-068  protected Configuration conf = null;
+068  protected Configuration conf;
 069  protected Path backupRootPath;
 070  protected String backupId;
 071  protected FileSystem fs;
@@ -97,433 +97,426 @@
 089   * @throws IOException exception
 090   */
 091  Path getTableArchivePath(TableName 
tableName) throws IOException {
-092
-093Path baseDir =
-094new 
Path(HBackupFileSystem.getTableBackupPath(tableName, backupRootPath, 
backupId),
-095
HConstants.HFILE_ARCHIVE_DIRECTORY);
-096Path dataDir = new Path(baseDir, 
HConstants.BASE_NAMESPACE_DIR);
-097Path archivePath = new Path(dataDir, 
tableName.getNamespaceAsString());
-098Path tableArchivePath = new 
Path(archivePath, tableName.getQualifierAsString());
-099if (!fs.exists(tableArchivePath) || 
!fs.getFileStatus(tableArchivePath).isDirectory()) {
-100  LOG.debug("Folder tableArchivePath: 
" + tableArchivePath.toString() + " does not exists");
-101  tableArchivePath = null; // empty 
table has no archive
-102}
-103return tableArchivePath;
-104  }
-105
-106  /**
-107   * Gets region list
-108   * @param tableName table name
-109   * @return RegionList region list
-110   * @throws FileNotFoundException 
exception
-111   * @throws IOException exception
-112   */
-113  ArrayList<Path> getRegionList(TableName tableName) throws FileNotFoundException, IOException {
-114    Path tableArchivePath = getTableArchivePath(tableName);
-115    ArrayList<Path> regionDirList = new ArrayList<Path>();
-116    FileStatus[] children = fs.listStatus(tableArchivePath);
-117    for (FileStatus 
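
For readers tracing the method above, the resolved layout can be restated as a short sketch. This is an illustration derived from the lines just shown, with tableBackupPath standing in for whatever HBackupFileSystem.getTableBackupPath(tableName, backupRootPath, backupId) returns:

  Path baseDir = new Path(tableBackupPath, HConstants.HFILE_ARCHIVE_DIRECTORY); // ".../archive"
  Path dataDir = new Path(baseDir, HConstants.BASE_NAMESPACE_DIR);              // ".../archive/data"
  Path archivePath = new Path(dataDir, tableName.getNamespaceAsString());
  Path tableArchivePath = new Path(archivePath, tableName.getQualifierAsString());
  // getTableArchivePath(tableName) returns null when this directory is missing,
  // because an empty table has no archive; getRegionList(tableName) then simply
  // lists the region directories under this path.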

[42/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html 
b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
index e1f2a76..cc21c6a 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public final class BackupSystemTable
+public final class BackupSystemTable
 extends Object
 implements Closeable
 This class provides API to access backup system table
@@ -973,7 +973,7 @@ implements Closeable
 
 
 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
 
 
 
@@ -982,7 +982,7 @@ implements Closeable
 
 
 tableName
-private TableName tableName
+private TableName tableName
 Backup system table (main) name
 
 
 
@@ -992,7 +992,7 @@ implements Closeable
 
 
 bulkLoadTableName
-private TableName bulkLoadTableName
+private TableName bulkLoadTableName
 Backup System table name for bulk loaded files.
  We keep all bulk loaded file references in a separate table
  because we have to isolate general backup operations: create, merge etc
 
@@ -1006,7 +1006,7 @@ implements Closeable
 
 
 SESSIONS_FAMILY
-static final byte[] SESSIONS_FAMILY
+static final byte[] SESSIONS_FAMILY
 Stores backup sessions (contexts)
 
 
@@ -1016,7 +1016,7 @@ implements Closeable
 
 
 META_FAMILY
-static final byte[] META_FAMILY
+static final byte[] META_FAMILY
 Stores other meta
 
 
@@ -1026,7 +1026,7 @@ implements Closeable
 
 
 BULK_LOAD_FAMILY
-static final byte[] BULK_LOAD_FAMILY
+static final byte[] BULK_LOAD_FAMILY
 
 
 
@@ -1035,7 +1035,7 @@ implements Closeable
 
 
 connection
-private final Connection connection
+private final Connection connection
 Connection to HBase cluster, shared among all instances
 
 
@@ -1045,7 +1045,7 @@ implements Closeable
 
 
 BACKUP_INFO_PREFIX
-private static final String BACKUP_INFO_PREFIX
+private static final String BACKUP_INFO_PREFIX
 
 See Also:
 Constant Field Values
@@ -1058,7 +1058,7 @@ implements Closeable
 
 
 START_CODE_ROW
-private static final String START_CODE_ROW
+private static final String START_CODE_ROW
 
 See Also:
 Constant Field Values
@@ -1071,7 +1071,7 @@ implements Closeable
 
 
 ACTIVE_SESSION_ROW
-private static final byte[] ACTIVE_SESSION_ROW
+private static final byte[] ACTIVE_SESSION_ROW
 
 
 
@@ -1080,7 +1080,7 @@ implements Closeable
 
 
 ACTIVE_SESSION_COL
-private static final byte[] ACTIVE_SESSION_COL
+private static final byte[] ACTIVE_SESSION_COL
 
 
 
@@ -1089,7 +1089,7 @@ implements Closeable
 
 
 ACTIVE_SESSION_YES
-private static final byte[] ACTIVE_SESSION_YES
+private static final byte[] ACTIVE_SESSION_YES
 
 
 
@@ -1098,7 +1098,7 @@ implements Closeable
 
 
 ACTIVE_SESSION_NO
-private static final byte[] ACTIVE_SESSION_NO
+private static final byte[] ACTIVE_SESSION_NO
 
 
 
@@ -1107,7 +1107,7 @@ implements Closeable
 
 
 INCR_BACKUP_SET
-private static final String INCR_BACKUP_SET
+private static final String INCR_BACKUP_SET
 
 See Also:
 Constant Field 
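
The row keys and families listed above describe the backup:system layout. As a purely hypothetical sketch (this page names only the row key and qualifier, so the column family and the stored values used here are assumptions), a client holding the shared Connection could probe the active-session flag like this:

  // Imports assumed: org.apache.hadoop.hbase.TableName and
  // org.apache.hadoop.hbase.client.{Table, Get, Result}.
  // Assumptions: the flag lives in the "meta" family and stores "yes"/"no".
  try (Table table = connection.getTable(TableName.valueOf("backup:system"))) {
    Get get = new Get("activesession:".getBytes());        // ACTIVE_SESSION_ROW
    get.addColumn("meta".getBytes(), "c".getBytes());      // ACTIVE_SESSION_COL is "c"
    Result result = table.get(get);
    byte[] value = result.getValue("meta".getBytes(), "c".getBytes());
    boolean active = value != null && "yes".equals(new String(value));
  }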

[14/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
index 3f74159..3445980 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.html
@@ -97,809 +97,809 @@
 089 * value = backupId and full WAL file name</li>
 090 * </ul></p>
 091 */
-092
-093@InterfaceAudience.Private
-094public final class BackupSystemTable 
implements Closeable {
-095  private static final Logger LOG = 
LoggerFactory.getLogger(BackupSystemTable.class);
-096
-097  static class WALItem {
-098String backupId;
-099String walFile;
-100String backupRoot;
-101
-102WALItem(String backupId, String 
walFile, String backupRoot) {
-103  this.backupId = backupId;
-104  this.walFile = walFile;
-105  this.backupRoot = backupRoot;
-106}
-107
-108public String getBackupId() {
-109  return backupId;
-110}
-111
-112public String getWalFile() {
-113  return walFile;
-114}
-115
-116public String getBackupRoot() {
-117  return backupRoot;
-118}
-119
-120@Override
-121public String toString() {
-122  return Path.SEPARATOR + backupRoot 
+ Path.SEPARATOR + backupId + Path.SEPARATOR + walFile;
-123}
+092@InterfaceAudience.Private
+093public final class BackupSystemTable 
implements Closeable {
+094  private static final Logger LOG = 
LoggerFactory.getLogger(BackupSystemTable.class);
+095
+096  static class WALItem {
+097String backupId;
+098String walFile;
+099String backupRoot;
+100
+101WALItem(String backupId, String 
walFile, String backupRoot) {
+102  this.backupId = backupId;
+103  this.walFile = walFile;
+104  this.backupRoot = backupRoot;
+105}
+106
+107public String getBackupId() {
+108  return backupId;
+109}
+110
+111public String getWalFile() {
+112  return walFile;
+113}
+114
+115public String getBackupRoot() {
+116  return backupRoot;
+117}
+118
+119@Override
+120public String toString() {
+121  return Path.SEPARATOR + backupRoot 
+ Path.SEPARATOR + backupId + Path.SEPARATOR + walFile;
+122}
+123  }
 124
-125  }
-126
-127  /**
-128   * Backup system table (main) name
-129   */
-130  private TableName tableName;
-131
-132  /**
-133   * Backup System table name for bulk 
loaded files.
-134   * We keep all bulk loaded file 
references in a separate table
-135   * because we have to isolate general 
backup operations: create, merge etc
-136   * from activity of RegionObserver, 
which controls process of a bulk loading
-137   * {@link 
org.apache.hadoop.hbase.backup.BackupObserver}
-138   */
-139
-140  private TableName bulkLoadTableName;
-141
-142  /**
-143   * Stores backup sessions (contexts)
-144   */
-145  final static byte[] SESSIONS_FAMILY = 
"session".getBytes();
-146  /**
-147   * Stores other meta
-148   */
-149  final static byte[] META_FAMILY = 
"meta".getBytes();
-150  final static byte[] BULK_LOAD_FAMILY = 
"bulk".getBytes();
-151  /**
-152   * Connection to HBase cluster, shared 
among all instances
-153   */
-154  private final Connection connection;
-155
-156  private final static String 
BACKUP_INFO_PREFIX = "session:";
-157  private final static String 
START_CODE_ROW = "startcode:";
-158  private final static byte[] 
ACTIVE_SESSION_ROW = "activesession:".getBytes();
-159  private final static byte[] 
ACTIVE_SESSION_COL = "c".getBytes();
+125  /**
+126   * Backup system table (main) name
+127   */
+128  private TableName tableName;
+129
+130  /**
+131   * Backup System table name for bulk 
loaded files.
+132   * We keep all bulk loaded file 
references in a separate table
+133   * because we have to isolate general 
backup operations: create, merge etc
+134   * from activity of RegionObserver, 
which controls process of a bulk loading
+135   * {@link 
org.apache.hadoop.hbase.backup.BackupObserver}
+136   */
+137  private TableName bulkLoadTableName;
+138
+139  /**
+140   * Stores backup sessions (contexts)
+141   */
+142  final static byte[] SESSIONS_FAMILY = 
"session".getBytes();
+143  /**
+144   * Stores other meta
+145   */
+146  final static byte[] META_FAMILY = 
"meta".getBytes();
+147  final static byte[] BULK_LOAD_FAMILY = 
"bulk".getBytes();
+148  /**
+149   * Connection to HBase cluster, shared 
among all instances
+150   */
+151  private final Connection connection;
+152
+153  private final static String 
BACKUP_INFO_PREFIX = "session:";
+154  private final static String 
START_CODE_ROW = "startcode:";
+155  private final static byte[] 
ACTIVE_SESSION_ROW = "activesession:".getBytes();
+156  private final static 
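
Since WALItem.toString() above joins root, id and file name with Path.SEPARATOR, the resulting key is easy to picture; a short example with invented values:

  WALItem item = new WALItem("backup_1517126698", "wal.1517126698001", "backuproot");
  item.toString();    // "/backuproot/backup_1517126698/wal.1517126698001"
  item.getWalFile();  // "wal.1517126698001"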

[12/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
index 4febd01..ef680de 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.html
@@ -71,203 +71,203 @@
 063   * @return The new HashMap of RS log 
time stamps after the log roll for this incremental backup.
 064   * @throws IOException exception
 065   */
-066  public HashMap<String, Long> getIncrBackupLogFileMap()
-067      throws IOException {
-068    List<String> logList;
-069    HashMap<String, Long> newTimestamps;
-070    HashMap<String, Long> previousTimestampMins;
-071
-072    String savedStartCode = readBackupStartCode();
-073
-074    // key: tableName
-075    // value: RegionServer,PreviousTimeStamp
-076    HashMap<TableName, HashMap<String, Long>> previousTimestampMap = readLogTimestampMap();
-077
-078    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);
-079
-080    if (LOG.isDebugEnabled()) {
-081      LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());
-082    }
-083    // get all new log files from .logs and .oldlogs after last TS and before new timestamp
-084    if (savedStartCode == null || previousTimestampMins == null
-085        || previousTimestampMins.isEmpty()) {
-086      throw new IOException(
-087          "Cannot read any previous back up timestamps from backup system table. "
-088              + "In order to create an incremental backup, at least one full backup is needed.");
-089    }
-090
-091    LOG.info("Execute roll log procedure for incremental backup ...");
-092    HashMap<String, String> props = new HashMap<String, String>();
-093    props.put("backupRoot", backupInfo.getBackupRootDir());
-094
-095    try (Admin admin = conn.getAdmin()) {
-096      admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,
-097        LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);
-098    }
-099    newTimestamps = readRegionServerLastLogRollResult();
-100
-101    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);
-102    List<WALItem> logFromSystemTable =
-103        getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, getBackupInfo()
-104            .getBackupRootDir());
-105    logList = excludeAlreadyBackedUpWALs(logList, logFromSystemTable);
-106    backupInfo.setIncrBackupFileList(logList);
-107
-108    return newTimestamps;
-109  }
-110
-111  /**
-112   * Get list of WAL files eligible for incremental backup
+066  public HashMap<String, Long> getIncrBackupLogFileMap() throws IOException {
+067    List<String> logList;
+068    HashMap<String, Long> newTimestamps;
+069    HashMap<String, Long> previousTimestampMins;
+070
+071    String savedStartCode = readBackupStartCode();
+072
+073    // key: tableName
+074    // value: RegionServer,PreviousTimeStamp
+075    HashMap<TableName, HashMap<String, Long>> previousTimestampMap = readLogTimestampMap();
+076
+077    previousTimestampMins = BackupUtils.getRSLogTimestampMins(previousTimestampMap);
+078
+079    if (LOG.isDebugEnabled()) {
+080      LOG.debug("StartCode " + savedStartCode + "for backupID " + backupInfo.getBackupId());
+081    }
+082    // get all new log files from .logs and .oldlogs after last TS and before new timestamp
+083    if (savedStartCode == null || previousTimestampMins == null
+084        || previousTimestampMins.isEmpty()) {
+085      throw new IOException(
+086          "Cannot read any previous back up timestamps from backup system table. "
+087              + "In order to create an incremental backup, at least one full backup is needed.");
+088    }
+089
+090    LOG.info("Execute roll log procedure for incremental backup ...");
+091    HashMap<String, String> props = new HashMap<>();
+092    props.put("backupRoot", backupInfo.getBackupRootDir());
+093
+094    try (Admin admin = conn.getAdmin()) {
+095      admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,
+096        LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);
+097    }
+098    newTimestamps = readRegionServerLastLogRollResult();
+099
+100    logList = getLogFilesForNewBackup(previousTimestampMins, newTimestamps, conf, savedStartCode);
+101    List<WALItem> logFromSystemTable =
+102        getLogFilesFromBackupSystem(previousTimestampMins, newTimestamps, getBackupInfo()
+103            .getBackupRootDir());
+104    logList = excludeAlreadyBackedUpWALs(logList, logFromSystemTable);
+105
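
Condensing the method above into its call sequence may help reviewers; the instance name below is hypothetical:

  // 1. readBackupStartCode() + readLogTimestampMap(): state left by the previous backup
  // 2. admin.execProcedure(ROLLLOG_PROCEDURE_SIGNATURE, ROLLLOG_PROCEDURE_NAME, props):
  //    distributed WAL roll on every region server
  // 3. readRegionServerLastLogRollResult(): fresh per-server roll timestamps
  // 4. getLogFilesForNewBackup(...) filtered by excludeAlreadyBackedUpWALs(...):
  //    the WAL list recorded via backupInfo.setIncrBackupFileList(logList)
  HashMap<String, Long> newTimestamps = manager.getIncrBackupLogFileMap();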

[05/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/regionserver/LogRollRegionServerProcedureManager.BackupSubprocedureBuilder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/regionserver/LogRollRegionServerProcedureManager.BackupSubprocedureBuilder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/regionserver/LogRollRegionServerProcedureManager.BackupSubprocedureBuilder.html
index 2d7a3e0..2c2ce10 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/regionserver/LogRollRegionServerProcedureManager.BackupSubprocedureBuilder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/regionserver/LogRollRegionServerProcedureManager.BackupSubprocedureBuilder.html
@@ -35,14 +35,14 @@
 027import 
org.apache.hadoop.hbase.backup.impl.BackupManager;
 028import 
org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
 029import 
org.apache.hadoop.hbase.coordination.ZkCoordinatedStateManager;
-030import 
org.apache.yetus.audience.InterfaceAudience;
-031import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
-032import 
org.apache.hadoop.hbase.procedure.ProcedureMember;
-033import 
org.apache.hadoop.hbase.procedure.ProcedureMemberRpcs;
-034import 
org.apache.hadoop.hbase.procedure.RegionServerProcedureManager;
-035import 
org.apache.hadoop.hbase.procedure.Subprocedure;
-036import 
org.apache.hadoop.hbase.procedure.SubprocedureFactory;
-037import 
org.apache.hadoop.hbase.regionserver.RegionServerServices;
+030import 
org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
+031import 
org.apache.hadoop.hbase.procedure.ProcedureMember;
+032import 
org.apache.hadoop.hbase.procedure.ProcedureMemberRpcs;
+033import 
org.apache.hadoop.hbase.procedure.RegionServerProcedureManager;
+034import 
org.apache.hadoop.hbase.procedure.Subprocedure;
+035import 
org.apache.hadoop.hbase.procedure.SubprocedureFactory;
+036import 
org.apache.hadoop.hbase.regionserver.RegionServerServices;
+037import 
org.apache.yetus.audience.InterfaceAudience;
 038import 
org.apache.zookeeper.KeeperException;
 039import org.slf4j.Logger;
 040import org.slf4j.LoggerFactory;
@@ -60,134 +60,129 @@
 052 */
 053@InterfaceAudience.Private
 054public class 
LogRollRegionServerProcedureManager extends RegionServerProcedureManager {
-055
-056  private static final Logger LOG =
-057  
LoggerFactory.getLogger(LogRollRegionServerProcedureManager.class);
-058
-059  /** Conf key for number of request 
threads to start backup on region servers */
-060  public static final String 
BACKUP_REQUEST_THREADS_KEY = "hbase.backup.region.pool.threads";
-061  /** # of threads for backup work on the 
rs. */
-062  public static final int 
BACKUP_REQUEST_THREADS_DEFAULT = 10;
-063
-064  public static final String 
BACKUP_TIMEOUT_MILLIS_KEY = "hbase.backup.timeout";
-065  public static final long 
BACKUP_TIMEOUT_MILLIS_DEFAULT = 6;
-066
-067  /** Conf key for millis between checks 
to see if backup work completed or if there are errors */
-068  public static final String 
BACKUP_REQUEST_WAKE_MILLIS_KEY = "hbase.backup.region.wakefrequency";
-069  /** Default amount of time to check for 
errors while regions finish backup work */
-070  private static final long 
BACKUP_REQUEST_WAKE_MILLIS_DEFAULT = 500;
-071
-072  private RegionServerServices rss;
-073  private ProcedureMemberRpcs 
memberRpcs;
-074  private ProcedureMember member;
-075  private boolean started = false;
-076
-077  /**
-078   * Create a default backup procedure 
manager
-079   */
-080  public 
LogRollRegionServerProcedureManager() {
-081  }
-082
-083  /**
-084   * Start accepting backup procedure 
requests.
-085   */
-086  @Override
-087  public void start() {
-088if 
(!BackupManager.isBackupEnabled(rss.getConfiguration())) {
-089  LOG.warn("Backup is not enabled. 
Check your " + BackupRestoreConstants.BACKUP_ENABLE_KEY
-090  + " setting");
-091  return;
-092}
-093
this.memberRpcs.start(rss.getServerName().toString(), member);
-094started = true;
-095LOG.info("Started region server 
backup manager.");
-096  }
-097
-098  /**
-099   * Close <tt>this</tt> and all running backup procedure tasks
-100   * @param force forcefully stop all 
running tasks
-101   * @throws IOException exception
-102   */
-103  @Override
-104  public void stop(boolean force) throws 
IOException {
-105if (!started) {
-106  return;
-107}
-108String mode = force ? "abruptly" : 
"gracefully";
-109LOG.info("Stopping 
RegionServerBackupManager " + mode + ".");
-110
-111try {
-112  this.member.close();
-113} finally {
-114  this.memberRpcs.close();
-115}
-116  }
-117
-118  /**
-119   * If in a running state, creates the 
specified subprocedure for handling a backup procedure.
-120   * @return Subprocedure to submit to the ProcedureMember.
-121   */
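
The configuration keys defined above can be overridden in the usual way. A hedged sketch follows; the timeout value is an assumption, since this page truncates the default:

  Configuration conf = HBaseConfiguration.create();
  conf.setInt("hbase.backup.region.pool.threads", 10);      // BACKUP_REQUEST_THREADS_KEY
  conf.setLong("hbase.backup.timeout", 60000L);             // BACKUP_TIMEOUT_MILLIS_KEY, assumed value
  conf.setLong("hbase.backup.region.wakefrequency", 500L);  // BACKUP_REQUEST_WAKE_MILLIS_KEY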

[02/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/export_control.html
--
diff --git a/export_control.html b/export_control.html
index 5f38c0a..a457e51 100644
--- a/export_control.html
+++ b/export_control.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  
   Export Control
@@ -336,7 +336,7 @@ for more details.
 The Apache Software Foundation (https://www.apache.org/).
 All rights reserved.  
 
-  Last Published: 
2018-01-27
+  Last Published: 
2018-01-28
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/hbase-annotations/checkstyle.html
--
diff --git a/hbase-annotations/checkstyle.html 
b/hbase-annotations/checkstyle.html
index e3b79d3..4602966 100644
--- a/hbase-annotations/checkstyle.html
+++ b/hbase-annotations/checkstyle.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Annotations  Checkstyle Results
 
@@ -150,7 +150,7 @@
 The Apache Software Foundation (https://www.apache.org/).
 All rights reserved.  
 
-  Last Published: 
2018-01-27
+  Last Published: 
2018-01-28
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/hbase-annotations/dependencies.html
--
diff --git a/hbase-annotations/dependencies.html 
b/hbase-annotations/dependencies.html
index 5c0b7eb..4ff9ca6 100644
--- a/hbase-annotations/dependencies.html
+++ b/hbase-annotations/dependencies.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Annotations  Project Dependencies
 
@@ -272,7 +272,7 @@
 The Apache Software Foundation (https://www.apache.org/).
 All rights reserved.  
 
-  Last Published: 
2018-01-27
+  Last Published: 
2018-01-28
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/hbase-annotations/dependency-convergence.html
--
diff --git a/hbase-annotations/dependency-convergence.html 
b/hbase-annotations/dependency-convergence.html
index fce9a76..5f180fd 100644
--- a/hbase-annotations/dependency-convergence.html
+++ b/hbase-annotations/dependency-convergence.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Annotations  Reactor Dependency 
Convergence
 
@@ -865,7 +865,7 @@
 The Apache Software Foundation (https://www.apache.org/).
 All rights reserved.  
 
-  Last Published: 
2018-01-27
+  Last Published: 
2018-01-28
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/hbase-annotations/dependency-info.html
--
diff --git a/hbase-annotations/dependency-info.html 
b/hbase-annotations/dependency-info.html
index bac658b..542ca0c 100644
--- a/hbase-annotations/dependency-info.html
+++ b/hbase-annotations/dependency-info.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Annotations  Dependency Information
 
@@ -147,7 +147,7 @@
 The Apache Software Foundation (https://www.apache.org/).
 All rights reserved.  
 
-  Last Published: 
2018-01-27
+  Last Published: 
2018-01-28
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/hbase-annotations/dependency-management.html
--
diff --git a/hbase-annotations/dependency-management.html 
b/hbase-annotations/dependency-management.html
index a13777d..3db18fb 100644
--- a/hbase-annotations/dependency-management.html
+++ b/hbase-annotations/dependency-management.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Annotations  Project Dependency 
Management
 
@@ -810,7 +810,7 @@
 The Apache Software Foundation (https://www.apache.org/).
 All rights reserved.  
 
-  Last Published: 
2018-01-27
+  Last Published: 
2018-01-28
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/hbase-annotations/index.html
--
diff --git a/hbase-annotations/index.html b/hbase-annotations/index.html
index f816f0f..c88bab2 100644
--- 

[46/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/org/apache/hadoop/hbase/backup/RestoreDriver.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/RestoreDriver.html 
b/devapidocs/org/apache/hadoop/hbase/backup/RestoreDriver.html
index 1ab6618..f1af18c 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/RestoreDriver.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/RestoreDriver.html
@@ -277,7 +277,7 @@ extends 
 
 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
 
 
 
@@ -286,7 +286,7 @@ extends 
 
 cmd
-private org.apache.commons.cli.CommandLine cmd
+private org.apache.commons.cli.CommandLine cmd
 
 
 
@@ -295,7 +295,7 @@ extends 
 
 USAGE_STRING
-private static final String USAGE_STRING
+private static final String USAGE_STRING
 
 See Also:
 Constant Field Values
@@ -308,7 +308,7 @@ extends 
 
 USAGE_FOOTER
-private static final String USAGE_FOOTER
+private static final String USAGE_FOOTER
 
 See Also:
 Constant Field Values
@@ -329,7 +329,7 @@ extends 
 
 RestoreDriver
-protected RestoreDriver()
+protected RestoreDriver()
  throws IOException
 
 Throws:
 IOException
@@ -351,12 +351,7 @@ extends 
 
 init
-protected void init()
- throws IOException
-
-Throws:
-IOException
-
+protected void init()
 
 
 
@@ -365,7 +360,7 @@ extends 
 
 parseAndRun
-private int parseAndRun(String[] args)
+private int parseAndRun(String[] args)
  throws IOException
 
 Throws:
@@ -379,7 +374,7 @@ extends 
 
 getTablesForSet
-private String getTablesForSet(Connection conn,
+private String getTablesForSet(Connection conn,
 String name,
 org.apache.hadoop.conf.Configuration conf)
 throws IOException
@@ -395,7 +390,7 @@ extends 
 
 addOptions
-protected void addOptions()
+protected void addOptions()
 Description copied from class: AbstractHBaseTool
 Override this to add command-line options using AbstractHBaseTool.addOptWithArg(java.lang.String, java.lang.String)
  and similar methods.
@@ -411,7 +406,7 @@ extends 
 
 processOptions
-protected void processOptions(org.apache.commons.cli.CommandLine cmd)
+protected void processOptions(org.apache.commons.cli.CommandLine cmd)
 Description copied from class: AbstractHBaseTool
 This method is called to process the options after they have been parsed.
 
@@ -426,7 +421,7 @@ extends 
 
 doWork
-protected int doWork()
+protected int doWork()
    throws Exception
 Description copied from class: AbstractHBaseTool
 The "main function" of the tool
@@ -444,7 +439,7 @@ extends 
 
 main
-public static void main(String[] args)
+public static void main(String[] args)
  throws Exception

[29/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
index eb9e252..667152a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.Command.html
@@ -28,22 +28,22 @@
 020
 021import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
 022import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
-029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
-033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
+031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
+035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
 039
 040import java.io.IOException;
 041import java.net.URI;
@@ -70,194 +70,194 @@
 062import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 063import 
org.apache.hadoop.hbase.client.Connection;
 064import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-065import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-066import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-067import 
org.apache.yetus.audience.InterfaceAudience;
-068
-069/**
-070 * General backup commands, options and 
usage messages
-071 */
-072
+065import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+066import 
org.apache.yetus.audience.InterfaceAudience;
+067
+068import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+069
+070/**
+071 * General backup commands, options and 
usage messages
+072 */
 073@InterfaceAudience.Private
 074public final class BackupCommands {
-075
-076  public final static String 
INCORRECT_USAGE = "Incorrect usage";
-077
-078  public final static String 
TOP_LEVEL_NOT_ALLOWED =
-079  "Top level (root) folder is not 
allowed to be a backup destination";
-080
-081  public static final String USAGE = 
"Usage: hbase backup COMMAND [command-specific 

[50/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/checkstyle-aggregate.html
--
diff --git a/checkstyle-aggregate.html b/checkstyle-aggregate.html
index e492b45..865c777 100644
--- a/checkstyle-aggregate.html
+++ b/checkstyle-aggregate.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase  Checkstyle Results
 
@@ -289,7 +289,7 @@
 3511
 0
 0
-17497
+17257
 
 Files
 
@@ -949,180 +949,20 @@
 0
 3
 
-org/apache/hadoop/hbase/backup/BackupClientFactory.java
-0
-0
-3
-
-org/apache/hadoop/hbase/backup/BackupCopyJob.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/BackupHFileCleaner.java
-0
-0
-3
-
-org/apache/hadoop/hbase/backup/BackupInfo.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/BackupMergeJob.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/BackupRestoreConstants.java
-0
-0
-4
-
-org/apache/hadoop/hbase/backup/BackupRestoreFactory.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/BackupTableInfo.java
-0
-0
-1
-
 org/apache/hadoop/hbase/backup/FailedArchiveException.java
 0
 0
 1
 
-org/apache/hadoop/hbase/backup/HBackupFileSystem.java
-0
-0
-1
-
 org/apache/hadoop/hbase/backup/HFileArchiver.java
 0
 0
 19
-
-org/apache/hadoop/hbase/backup/LogUtils.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/RestoreDriver.java
-0
-0
-2
-
-org/apache/hadoop/hbase/backup/RestoreJob.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/RestoreRequest.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/TestBackupBase.java
-0
-0
-13
-
-org/apache/hadoop/hbase/backup/TestBackupBoundaryTests.java
-0
-0
-5
-
-org/apache/hadoop/hbase/backup/TestBackupDelete.java
-0
-0
-2
 
-org/apache/hadoop/hbase/backup/TestBackupDeleteRestore.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/TestBackupDeleteWithFailures.java
-0
-0
-16
-
-org/apache/hadoop/hbase/backup/TestBackupDescribe.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/TestBackupHFileCleaner.java
-0
-0
-4
-
-org/apache/hadoop/hbase/backup/TestBackupShowHistory.java
-0
-0
-2
-
-org/apache/hadoop/hbase/backup/TestBackupStatusProgress.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/TestBackupSystemTable.java
-0
-0
-2
-
-org/apache/hadoop/hbase/backup/TestFullBackupSet.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/TestFullRestore.java
-0
-0
-11
-
 org/apache/hadoop/hbase/backup/TestHFileArchiving.java
 0
 0
 13
-
-org/apache/hadoop/hbase/backup/TestIncrementalBackupMergeWithFailures.java
-0
-0
-3
-
-org/apache/hadoop/hbase/backup/TestIncrementalBackupWithBulkLoad.java
-0
-0
-6
-
-org/apache/hadoop/hbase/backup/TestRemoteBackup.java
-0
-0
-2
-
-org/apache/hadoop/hbase/backup/TestRemoteRestore.java
-0
-0
-2
-
-org/apache/hadoop/hbase/backup/TestRepairAfterFailedDelete.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/TestRestoreBoundaryTests.java
-0
-0
-2
-
-org/apache/hadoop/hbase/backup/TestSystemTableSnapshot.java
-0
-0
-1
 
 org/apache/hadoop/hbase/backup/example/HFileArchiveManager.java
 0
@@ -1149,106 +989,6 @@
 0
 2
 
-org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.java
-0
-0
-15
-
-org/apache/hadoop/hbase/backup/impl/BackupCommands.java
-0
-0
-57
-
-org/apache/hadoop/hbase/backup/impl/BackupManager.java
-0
-0
-3
-
-org/apache/hadoop/hbase/backup/impl/BackupManifest.java
-0
-0
-2
-
-org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
-0
-0
-21
-
-org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.java
-0
-0
-3
-
-org/apache/hadoop/hbase/backup/impl/IncrementalBackupManager.java
-0
-0
-4
-
-org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.java
-0
-0
-9
-
-org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/impl/TableBackupClient.java
-0
-0
-4
-
-org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.java
-0
-0
-2
-
-org/apache/hadoop/hbase/backup/mapreduce/MapReduceHFileSplitterJob.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/mapreduce/MapReduceRestoreJob.java
-0
-0
-4
-
-org/apache/hadoop/hbase/backup/master/BackupLogCleaner.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/master/LogRollMasterProcedureManager.java
-0
-0
-3
-
-org/apache/hadoop/hbase/backup/regionserver/LogRollBackupSubprocedure.java
-0
-0
-2
-
-org/apache/hadoop/hbase/backup/regionserver/LogRollBackupSubprocedurePool.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/regionserver/LogRollRegionServerProcedureManager.java
-0
-0
-1
-
-org/apache/hadoop/hbase/backup/util/BackupUtils.java
-0
-0
-4
-
-org/apache/hadoop/hbase/backup/util/RestoreTool.java
-0
-0
-5
-
 org/apache/hadoop/hbase/chaos/actions/Action.java
 0
 0
@@ -11319,17 +11059,17 @@
 
 blocks
 EmptyBlock (http://checkstyle.sourceforge.net/config_blocks.html#EmptyBlock)
-48
+47
 Error
 
 
 LeftCurly (http://checkstyle.sourceforge.net/config_blocks.html#LeftCurly)
-208
+200
 Error
 
 
 NeedBraces (http://checkstyle.sourceforge.net/config_blocks.html#NeedBraces)
-2038
+2004
 Error
 
 coding
@@ -11359,12 +11099,12 @@
 
 design
 

[16/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.html
index 7509dcf..ec2aa41 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.html
@@ -64,152 +64,152 @@
 056 */
 057@InterfaceAudience.Private
 058public class BackupManifest {
-059
-060  private static final Logger LOG = 
LoggerFactory.getLogger(BackupManifest.class);
-061
-062  // manifest file name
-063  public static final String 
MANIFEST_FILE_NAME = ".backup.manifest";
-064
-065  /**
-066   * Backup image, the dependency graph 
is made up by series of backup images BackupImage contains
-067   * all the relevant information to 
restore the backup and is used during restore operation
-068   */
-069
-070  public static class BackupImage implements Comparable<BackupImage> {
+059  private static final Logger LOG = 
LoggerFactory.getLogger(BackupManifest.class);
+060
+061  // manifest file name
+062  public static final String 
MANIFEST_FILE_NAME = ".backup.manifest";
+063
+064  /**
+065   * Backup image, the dependency graph 
is made up by series of backup images BackupImage contains
+066   * all the relevant information to 
restore the backup and is used during restore operation
+067   */
+068  public static class BackupImage implements Comparable<BackupImage> {
+069static class Builder {
+070  BackupImage image;
 071
-072static class Builder {
-073  BackupImage image;
-074
-075  Builder() {
-076image = new BackupImage();
-077  }
-078
-079  Builder withBackupId(String 
backupId) {
-080image.setBackupId(backupId);
-081return this;
-082  }
-083
-084  Builder withType(BackupType type) 
{
-085image.setType(type);
-086return this;
-087  }
-088
-089  Builder withRootDir(String rootDir) 
{
-090image.setRootDir(rootDir);
-091return this;
-092  }
-093
-094  Builder withTableList(List<TableName> tableList) {
-095image.setTableList(tableList);
-096return this;
-097  }
-098
-099  Builder withStartTime(long 
startTime) {
-100image.setStartTs(startTime);
-101return this;
-102  }
-103
-104  Builder withCompleteTime(long 
completeTime) {
-105
image.setCompleteTs(completeTime);
-106return this;
-107  }
-108
-109  BackupImage build() {
-110return image;
-111  }
-112
-113}
-114
-115private String backupId;
-116private BackupType type;
-117private String rootDir;
-118private List<TableName> tableList;
-119private long startTs;
-120private long completeTs;
-121private ArrayList<BackupImage> ancestors;
-122private HashMap<TableName, HashMap<String, Long>> incrTimeRanges;
-123
-124static Builder newBuilder() {
-125  return new Builder();
-126}
-127
-128public BackupImage() {
-129  super();
-130}
-131
-132private BackupImage(String backupId, 
BackupType type, String rootDir,
-133List<TableName> tableList, long startTs, long completeTs) {
-134  this.backupId = backupId;
-135  this.type = type;
-136  this.rootDir = rootDir;
-137  this.tableList = tableList;
-138  this.startTs = startTs;
-139  this.completeTs = completeTs;
-140}
-141
-142static BackupImage 
fromProto(BackupProtos.BackupImage im) {
-143  String backupId = 
im.getBackupId();
-144  String rootDir = 
im.getBackupRootDir();
-145  long startTs = im.getStartTs();
-146  long completeTs = 
im.getCompleteTs();
-147  List<HBaseProtos.TableName> tableListList = im.getTableListList();
-148  List<TableName> tableList = new ArrayList<TableName>();
-149  for (HBaseProtos.TableName tn : 
tableListList) {
-150
tableList.add(ProtobufUtil.toTableName(tn));
-151  }
-152
-153  
ListBackupProtos.BackupImage ancestorList = im.getAncestorsList();
-154
-155  BackupType type =
-156  im.getBackupType() == 
BackupProtos.BackupType.FULL ? BackupType.FULL
-157  : BackupType.INCREMENTAL;
-158
-159  BackupImage image = new 
BackupImage(backupId, type, rootDir, tableList, startTs, completeTs);
-160  for (BackupProtos.BackupImage img : 
ancestorList) {
-161
image.addAncestor(fromProto(img));
-162  }
-163  
image.setIncrTimeRanges(loadIncrementalTimestampMap(im));
-164  return image;
-165}
-166
-167BackupProtos.BackupImage toProto() 
{
-168  BackupProtos.BackupImage.Builder 
builder = BackupProtos.BackupImage.newBuilder();
-169  builder.setBackupId(backupId);
-170  
builder.setCompleteTs(completeTs);
-171  
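
The Builder above is straightforward to use; a short sketch with invented values (newBuilder() is package-private, so this would live in the same package):

  BackupImage image = BackupImage.newBuilder()
      .withBackupId("backup_1517126698")
      .withType(BackupType.FULL)
      .withRootDir("hdfs://namenode:8020/backup")
      .withTableList(Lists.newArrayList(TableName.valueOf("t1")))
      .withStartTime(1517126698000L)
      .withCompleteTime(1517126699000L)
      .build();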

[38/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupHFileCleaner.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupHFileCleaner.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupHFileCleaner.html
index 1592678..ee337b2 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupHFileCleaner.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/BackupHFileCleaner.html
@@ -39,153 +39,155 @@
 031import 
org.apache.hadoop.hbase.HBaseInterfaceAudience;
 032import 
org.apache.hadoop.hbase.TableName;
 033import 
org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
-034import 
org.apache.yetus.audience.InterfaceAudience;
-035import org.slf4j.Logger;
-036import org.slf4j.LoggerFactory;
-037import 
org.apache.hadoop.hbase.client.Connection;
-038import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-039import 
org.apache.hadoop.hbase.master.cleaner.BaseHFileCleanerDelegate;
-040import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+034import 
org.apache.hadoop.hbase.client.Connection;
+035import 
org.apache.hadoop.hbase.client.ConnectionFactory;
+036import 
org.apache.hadoop.hbase.master.cleaner.BaseHFileCleanerDelegate;
+037import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+038import 
org.apache.yetus.audience.InterfaceAudience;
+039import org.slf4j.Logger;
+040import org.slf4j.LoggerFactory;
 041
 042import 
org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
-043import 
org.apache.hbase.thirdparty.com.google.common.base.Predicate;
-044import 
org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
-045
-046/**
-047 * Implementation of a file cleaner that 
checks if an hfile is still referenced by backup before
-048 * deleting it from hfile archive 
directory.
-049 */
-050@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
-051public class BackupHFileCleaner extends 
BaseHFileCleanerDelegate implements Abortable {
-052  private static final Logger LOG = 
LoggerFactory.getLogger(BackupHFileCleaner.class);
-053  private boolean stopped = false;
-054  private boolean aborted;
-055  private Configuration conf;
-056  private Connection connection;
-057  private long prevReadFromBackupTbl = 0, 
// timestamp of most recent read from backup:system table
-058  secondPrevReadFromBackupTbl = 0; // 
timestamp of 2nd most recent read from backup:system table
-059  //used by unit test to skip reading 
backup:system
-060  private boolean 
checkForFullyBackedUpTables = true;
-061  private List<TableName> fullyBackedUpTables = null;
-062
-063  private Set<String> getFilenameFromBulkLoad(Map<byte[], List<Path>>[] maps) {
-064Set<String> filenames = new HashSet<String>();
-065for (Map<byte[], List<Path>> map : maps) {
-066  if (map == null) continue;
-067  for (List<Path> paths : map.values()) {
-068for (Path p : paths) {
-069  filenames.add(p.getName());
-070}
-071  }
-072}
-073return filenames;
-074  }
-075
-076  private Set<String> loadHFileRefs(List<TableName> tableList) throws IOException {
-077if (connection == null) {
-078  connection = 
ConnectionFactory.createConnection(conf);
-079}
-080try (BackupSystemTable tbl = new 
BackupSystemTable(connection)) {
-081  Map<byte[], List<Path>>[] res =
-082  tbl.readBulkLoadedFiles(null, 
tableList);
-083  secondPrevReadFromBackupTbl = 
prevReadFromBackupTbl;
-084  prevReadFromBackupTbl = 
EnvironmentEdgeManager.currentTime();
-085  return 
getFilenameFromBulkLoad(res);
-086}
-087  }
-088
-089  @VisibleForTesting
-090  void 
setCheckForFullyBackedUpTables(boolean b) {
-091checkForFullyBackedUpTables = b;
-092  }
-093  @Override
-094  public Iterable<FileStatus> getDeletableFiles(Iterable<FileStatus> files) {
-095if (conf == null) {
-096  return files;
-097}
-098// obtain the Set of TableName's 
which have been fully backed up
-099// so that we filter BulkLoad to be 
returned from server
-100if (checkForFullyBackedUpTables) {
-101  if (connection == null) return 
files;
-102  try (BackupSystemTable tbl = new 
BackupSystemTable(connection)) {
-103fullyBackedUpTables = 
tbl.getTablesForBackupType(BackupType.FULL);
-104  } catch (IOException ioe) {
-105LOG.error("Failed to get tables 
which have been fully backed up, skipping checking", ioe);
-106return Collections.emptyList();
-107  }
-108  
Collections.sort(fullyBackedUpTables);
-109}
-110final Set<String> hfileRefs;
-111try {
-112  hfileRefs = 
loadHFileRefs(fullyBackedUpTables);
-113} catch (IOException ioe) {
-114  LOG.error("Failed to read hfile 
references, skipping checking deletable files", ioe);
-115  return Collections.emptyList();
-116}
-117IterableFileStatus deletables 
= 

[48/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/org/apache/hadoop/hbase/backup/BackupInfo.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/BackupInfo.html 
b/devapidocs/org/apache/hadoop/hbase/backup/BackupInfo.html
index 394762e..7a875a9 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/BackupInfo.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/BackupInfo.html
@@ -583,7 +583,7 @@ implements Comparable
 
 
 backupId
-private String backupId
+private String backupId
 Backup id
 
 
@@ -593,7 +593,7 @@ implements Comparable
 
 
 type
-private BackupType type
+private BackupType type
 Backup type, full or incremental
 
 
@@ -603,7 +603,7 @@ implements Comparable
 
 
 backupRootDir
-private String backupRootDir
+private String backupRootDir
 Target root directory for storing the backup files
 
 
@@ -613,7 +613,7 @@ implements Comparable
 
 
 state
-private BackupInfo.BackupState state
+private BackupInfo.BackupState state
 Backup state
 
 
@@ -623,7 +623,7 @@ implements Comparable
 
 
 phase
-private BackupInfo.BackupPhase phase
+private BackupInfo.BackupPhase phase
 Backup phase
 
 
@@ -633,7 +633,7 @@ implements Comparable
 
 
 failedMsg
-private String failedMsg
+private String failedMsg
 Backup failure message
 
 
@@ -643,7 +643,7 @@ implements Comparable
 
 
 backupTableInfoMap
-private Map<TableName,BackupTableInfo> backupTableInfoMap
+private Map<TableName,BackupTableInfo> backupTableInfoMap
 Backup status map for all tables
 
 
@@ -653,7 +653,7 @@ implements Comparable
 
 
 startTs
-private long startTs
+private long startTs
 Actual start timestamp of a backup process
 
 
@@ -663,7 +663,7 @@ implements Comparable
 
 
 completeTs
-private long completeTs
+private long completeTs
 Actual end timestamp of the backup process
 
 
@@ -673,7 +673,7 @@ implements Comparable
 
 
 totalBytesCopied
-private long totalBytesCopied
+private long totalBytesCopied
 Total bytes of incremental logs copied
 
 
@@ -683,7 +683,7 @@ implements Comparable
 
 
 hlogTargetDir
-private String hlogTargetDir
+private String hlogTargetDir
 For incremental backup, a location of a backed-up hlogs
 
 
@@ -693,7 +693,7 @@ implements Comparable
 
 
 incrBackupFileList
-private List<String> incrBackupFileList
+private List<String> incrBackupFileList
 Incremental backup file list
 
 
@@ -703,7 +703,7 @@ implements Comparable
 
 
 tableSetTimestampMap
-private HashMap

[20/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html
index eb9e252..667152a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.html
@@ -28,22 +28,22 @@
The first hunk re-sorts the static imports of the BackupRestoreConstants option names
alphabetically: OPTION_DEBUG and OPTION_DEBUG_DESC move from the end of the block to the
front, ahead of OPTION_PATH/OPTION_PATH_DESC, OPTION_RECORD_NUMBER[_DESC],
OPTION_SET/OPTION_SET_BACKUP_DESC/OPTION_SET_DESC, OPTION_TABLE[_DESC],
OPTION_TABLE_LIST_DESC, OPTION_WORKERS[_DESC] and OPTION_YARN_QUEUE_NAME[_DESC].

@@ -70,194 +70,194 @@
The second hunk moves the shaded Guava import
(org.apache.hbase.thirdparty.com.google.common.collect.Lists) out of the Hadoop/Yetus
group into its own trailing group and drops a stray blank line after the class javadoc.
The resulting preamble, cleaned of markup:

  import org.apache.hadoop.hbase.backup.util.BackupUtils;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.client.ConnectionFactory;
  import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
  import org.apache.yetus.audience.InterfaceAudience;

  import org.apache.hbase.thirdparty.com.google.common.collect.Lists;

  /**
   * General backup commands, options and usage messages
   */
  @InterfaceAudience.Private
  public final class BackupCommands {

    public final static String INCORRECT_USAGE = "Incorrect usage";

    public final static String TOP_LEVEL_NOT_ALLOWED =
        "Top level (root) folder is not allowed to be a backup destination";

    public static final String USAGE = "Usage: hbase backup COMMAND [command-specific arguments]\n"
        + "where COMMAND

[19/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManager.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManager.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManager.html
index a708c67..84b2841 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManager.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManager.html
@@ -89,426 +89,420 @@
The hunk is almost entirely renumbering: a stray blank line after the constructor body is
dropped and one over-long conf.set(...) call is wrapped, shifting every subsequent source
line up. The affected code, cleaned of markup:

  this.conf = conf;
  this.conn = conn;
  this.systemTable = new BackupSystemTable(conn);
}

/**
 * Returns backup info
 */
protected BackupInfo getBackupInfo() {
  return backupInfo;
}

/**
 * This method modifies the master's configuration in order to inject backup-related
 * features (TESTs only)
 * @param conf configuration
 */
@VisibleForTesting
public static void decorateMasterConfiguration(Configuration conf) {
  if (!isBackupEnabled(conf)) {
    return;
  }
  // Add WAL archive cleaner plug-in
  String plugins = conf.get(HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS);
  String cleanerClass = BackupLogCleaner.class.getCanonicalName();
  if (!plugins.contains(cleanerClass)) {
    conf.set(HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS, plugins + "," + cleanerClass);
  }

  String classes = conf.get(ProcedureManagerHost.MASTER_PROCEDURE_CONF_KEY);
  String masterProcedureClass = LogRollMasterProcedureManager.class.getName();
  if (classes == null) {
    conf.set(ProcedureManagerHost.MASTER_PROCEDURE_CONF_KEY, masterProcedureClass);
  } else if (!classes.contains(masterProcedureClass)) {
    conf.set(ProcedureManagerHost.MASTER_PROCEDURE_CONF_KEY, classes + ","
        + masterProcedureClass);
  }

  if (LOG.isDebugEnabled()) {
    LOG.debug("Added log cleaner: " + cleanerClass + "\n" + "Added master procedure manager: "
        + masterProcedureClass);
  }
}

/**
 * This method modifies the Region Server configuration in order to inject backup-related
 * features (TESTs only).
 * @param conf configuration
 */
@VisibleForTesting
public static void decorateRegionServerConfiguration(Configuration conf) {
  if (!isBackupEnabled(conf)) {
    return;
  }

  String classes = conf.get(ProcedureManagerHost.REGIONSERVER_PROCEDURE_CONF_KEY);
  String regionProcedureClass = LogRollRegionServerProcedureManager.class.getName();
  if (classes == null) {
    conf.set(ProcedureManagerHost.REGIONSERVER_PROCEDURE_CONF_KEY, regionProcedureClass);
  } else if (!classes.contains(regionProcedureClass)) {
    conf.set(ProcedureManagerHost.REGIONSERVER_PROCEDURE_CONF_KEY, classes + ","
        + regionProcedureClass);
  }
  String coproc = conf.get(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY);
  String regionObserverClass = BackupObserver.class.getName();
  conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
      (coproc == null ? "" : coproc + ",") + regionObserverClass);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Added region procedure manager: " + regionProcedureClass
        + ". Added region observer: " + regionObserverClass);
  }
}

public static boolean isBackupEnabled(Configuration conf) {
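Both decorate* methods rely on the same append-if-absent idiom for comma-separated plugin
keys. Distilled into a standalone helper (the helper and class names are mine, not part of
this commit):

  import org.apache.hadoop.conf.Configuration;

  public final class ConfigListUtil {
    private ConfigListUtil() {
    }

    // Append className to the comma-separated list stored under key, skipping the update
    // when an entry is already present -- the pattern decorateMasterConfiguration uses.
    public static void appendToList(Configuration conf, String key, String className) {
      String existing = conf.get(key);
      if (existing == null || existing.isEmpty()) {
        conf.set(key, className);
      } else if (!existing.contains(className)) {
        conf.set(key, existing + "," + className);
      }
    }
  }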

[13/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
index 2ac2191..a0a02e0 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/FullTableBackupClient.html
@@ -47,12 +47,12 @@
The import block of the "Full table backup implementation" is re-sorted so the Yetus and
SLF4J imports follow the HBase client/util group:

  import org.apache.hadoop.hbase.backup.BackupType;
  import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
  import org.apache.hadoop.hbase.backup.util.BackupUtils;
  import org.apache.hadoop.hbase.client.Admin;
  import org.apache.hadoop.hbase.client.Connection;
  import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
  import org.apache.yetus.audience.InterfaceAudience;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

@@ -94,7 +94,7 @@
In snapshotCopy, a redundant initializer is dropped:

    // Currently we simply set the sub copy tasks by counting the table snapshot number, we
    // can calculate the real files' size for the percentage in the future.
    // backupCopier.setSubTaskPercntgInWholeTask(1f / numOfSnapshots);
-   int res = 0;
+   int res;
    String[] args = new String[4];
    args[0] = "-snapshot";
    args[1] = backupInfo.getSnapshotName(table);

@@ -124,121 +124,119 @@
execute() is re-rendered (the replacement side is truncated out of this message); the
removed side, cleaned of markup:

  /**
   * Backup request execution
   * @throws IOException
   */
  @Override
  public void execute() throws IOException {
    try (Admin admin = conn.getAdmin()) {
      // Begin BACKUP
      beginBackup(backupManager, backupInfo);
      String savedStartCode = null;
      boolean firstBackup = false;
      // do snapshot for full table backup

      savedStartCode = backupManager.readBackupStartCode();
      firstBackup = savedStartCode == null || Long.parseLong(savedStartCode) == 0L;
      if (firstBackup) {
        // This is our first backup. Let's put some marker to system table so that we can
        // hold the logs while we do the backup.
        backupManager.writeBackupStartCode(0L);
      }
      // We roll log here before we do the snapshot. It is possible there is duplicate data
      // in the log that is already in the snapshot. But if we do it after the snapshot, we
      // could have data loss.
      // A better approach is to do the roll log on each RS in the same global procedure as
      // the snapshot.
      LOG.info("Execute roll log procedure for full backup ...");

      Map<String, String> props = new HashMap<String, String>();
      props.put("backupRoot", backupInfo.getBackupRootDir());
      admin.execProcedure(LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_SIGNATURE,
        LogRollMasterProcedureManager.ROLLLOG_PROCEDURE_NAME, props);

      newTimestamps = backupManager.readRegionServerLastLogRollResult();
      if (firstBackup) {
        // Updates registered log files
        // We record ALL old WAL files as registered, because
        // this is a first full backup in the system and these
        // files are not needed for next incremental backup
        List<String> logFiles = BackupUtils.getWALFilesOlderThan(conf, newTimestamps);
        backupManager.recordWALFiles(logFiles);
      }

      // SNAPSHOT_TABLES:
      backupInfo.setPhase(BackupPhase.SNAPSHOT);
      for (TableName tableName : tableList) {
        String snapshotName =
            "snapshot_" + Long.toString(EnvironmentEdgeManager.currentTime()) + "_"
                + tableName.getNamespaceAsString() + "_" + tableName.getQualifierAsString();

        snapshotTable(admin, tableName, snapshotName);
        backupInfo.setSnapshotName(tableName, snapshotName);
      }

      // SNAPSHOT_COPY:
      // do snapshot copy
      LOG.debug("snapshot copy for " + backupId);
      snapshotCopy(backupInfo);
      // Updates incremental backup table set
      backupManager.addIncrementalBackupTableSet(backupInfo.getTables());

      // BACKUP_COMPLETE:
      // set overall backup status: complete. Here we make sure to complete the backup.
      // After this checkpoint, even if entering cancel process, will let the backup ...
[49/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/checkstyle.rss
--
diff --git a/checkstyle.rss b/checkstyle.rss
index 0661233..4b0b1f2 100644
--- a/checkstyle.rss
+++ b/checkstyle.rss
@@ -26,7 +26,7 @@ under the License.
 2007 - 2018 The Apache Software Foundation
 
   File: 3511,
- Errors: 17497,
+ Errors: 17257,
  Warnings: 0,
  Infos: 0
   
@@ -965 ... @@ -21909 (per-file counters)
Each of the remaining hunks reduces one file's checkstyle error counter to 0. The visible
values are 1, 1, 16, 1, 3, 2, 2, 4, 13, 2, 4, 1, 6, 21, 3, 1, 4, 1, 1, 9, 2 and 1, with
the message truncating at the @@ -21909 hunk; the full set accounts for the 240-error drop
(17497 -> 17257) recorded above. The file names these counters belong to were lost with
the surrounding markup.

[39/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/org/apache/hadoop/hbase/backup/util/BackupUtils.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/util/BackupUtils.html b/devapidocs/org/apache/hadoop/hbase/backup/util/BackupUtils.html
index 081b149..5a31951 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/util/BackupUtils.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/util/BackupUtils.html
@@ -499,11 +499,10 @@
copyTableRegionInfo no longer declares InterruptedException:

- public static void copyTableRegionInfo(Connection conn, BackupInfo backupInfo,
-     org.apache.hadoop.conf.Configuration conf) throws IOException, InterruptedException
+ public static void copyTableRegionInfo(Connection conn, BackupInfo backupInfo,
+     org.apache.hadoop.conf.Configuration conf) throws IOException

  "Copy out Table RegionInfo into incremental backup image; need to consider moving this
  logic into HBackupFileSystem."
  Parameters: ... conf - configuration
  Throws: IOException - exception (the InterruptedException entry is dropped)

@@ -523,7 +521,7 @@
writeRegioninfoOnFilesystem(Configuration conf, FileSystem fs, Path regionInfoDir,
RegionInfo regionInfo): anchor renumbering only.

@@ -541,7 +539,7 @@
parseHostNameFromLogFile(Path p) -- "Parses hostname:port from WAL file path": anchor
renumbering only.

@@ -557,17 +555,13 @@
The String overload of getUniqueWALFileNamePart drops both its throws clause and the stale
Throws entries (IOException, IllegalArgumentException):

- public static String getUniqueWALFileNamePart(String walFileName) throws IOException
+ public static String getUniqueWALFileNamePart(String walFileName)

  "Returns WAL file name." Parameters: walFileName - WAL file name. Returns: WAL file name.

@@ -577,16 +571,13 @@
The Path overload getUniqueWALFileNamePart(Path p) likewise drops its IOException
(message truncated here).
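For readers following along, the hostname extraction that parseHostNameFromLogFile
documents can be pictured like this. A hedged sketch, assuming WAL file names embed the
ServerName with URL-encoded commas (host%2Cport%2Cstartcode); this is an illustration, not
the BackupUtils implementation, and the sample path is made up:

  import org.apache.hadoop.fs.Path;

  public class WalNameDemo {
    // Split the file name on the encoded commas and rebuild "host:port".
    static String hostAndPort(Path p) {
      String[] parts = p.getName().split("%2C");
      return parts.length >= 3 ? parts[0] + ":" + parts[1] : null;
    }

    public static void main(String[] args) {
      Path wal = new Path(
          "/hbase/oldWALs/host1.example.com%2C16020%2C1516000000000.1516412345678");
      System.out.println(hostAndPort(wal)); // -> host1.example.com:16020
    }
  }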

[11/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.html
index 75e3c2b..6c25207 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/IncrementalTableBackupClient.html
@@ -80,7 +80,7 @@
filterMissingFiles switches to the diamond operator:

  protected List<String> filterMissingFiles(List<String> incrBackupFileList) throws IOException {
-   List<String> list = new ArrayList<String>();
+   List<String> list = new ArrayList<>();
    for (String file : incrBackupFileList) {
      Path p = new Path(file);
      if (fs.exists(p) || isActiveWalPath(p)) {

@@ -102,201 +102,201 @@
getIndex gains braces around its early return, and handleBulkLoad is re-wrapped; the
resulting code, cleaned of markup (the message truncates partway through the loop body):

  protected static int getIndex(TableName tbl, List<TableName> sTableList) {
    if (sTableList == null) {
      return 0;
    }

    for (int i = 0; i < sTableList.size(); i++) {
      if (tbl.equals(sTableList.get(i))) {
        return i;
      }
    }
    return -1;
  }

  /*
   * Reads bulk load records from backup table, iterates through the records and forms the
   * paths for bulk loaded hfiles. Copies the bulk loaded hfiles to backup destination
   * @param sTableList list of tables to be backed up
   * @return map of table to List of files
   */
  @SuppressWarnings("unchecked")
  protected Map<byte[], List<Path>>[] handleBulkLoad(List<TableName> sTableList)
      throws IOException {
    Map<byte[], List<Path>>[] mapForSrc = new Map[sTableList.size()];
    List<String> activeFiles = new ArrayList<>();
    List<String> archiveFiles = new ArrayList<>();
    Pair<Map<TableName, Map<String, Map<String, List<Pair<String, Boolean>>>>>, List<byte[]>> pair =
        backupManager.readBulkloadRows(sTableList);
    Map<TableName, Map<String, Map<String, List<Pair<String, Boolean>>>>> map = pair.getFirst();
    FileSystem tgtFs;
    try {
      tgtFs = FileSystem.get(new URI(backupInfo.getBackupRootDir()), conf);
    } catch (URISyntaxException use) {
      throw new IOException("Unable to get FileSystem", use);
    }
    Path rootdir = FSUtils.getRootDir(conf);
    Path tgtRoot = new Path(new Path(backupInfo.getBackupRootDir()), backupId);

    for (Map.Entry<TableName, Map<String, Map<String, List<Pair<String, Boolean>>>>> tblEntry :
        map.entrySet()) {
      TableName srcTable = tblEntry.getKey();

      int srcIdx = getIndex(srcTable, sTableList);
      if (srcIdx < 0) {
        LOG.warn("Couldn't find " + srcTable + " in source table List");
        continue;
      }
      if (mapForSrc[srcIdx] == null) {
        mapForSrc[srcIdx] = new TreeMap<byte[], List<Path>>(Bytes.BYTES_COMPARATOR);
      }
      Path tblDir = FSUtils.getTableDir(rootdir, srcTable);
      Path tgtTable = new Path(new Path(tgtRoot, srcTable.getNamespaceAsString()),
          srcTable.getQualifierAsString());
      for (Map.Entry<String, Map<String, List<Pair<String, Boolean>>>> regionEntry :
          tblEntry.getValue().entrySet()) {
        String regionName =
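One detail worth isolating from handleBulkLoad: the destination FileSystem is resolved
from the backup root URI, with URI problems rethrown as IOException. A self-contained
sketch of just that step (the local-file root is illustrative):

  import java.io.IOException;
  import java.net.URI;
  import java.net.URISyntaxException;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;

  public class TargetFsDemo {
    // Resolve the FileSystem that owns backupRootDir, wrapping URI problems in
    // IOException exactly as handleBulkLoad does above.
    static FileSystem targetFs(String backupRootDir, Configuration conf) throws IOException {
      try {
        return FileSystem.get(new URI(backupRootDir), conf);
      } catch (URISyntaxException use) {
        throw new IOException("Unable to get FileSystem", use);
      }
    }

    public static void main(String[] args) throws IOException {
      Configuration conf = new Configuration();
      FileSystem fs = targetFs("file:///tmp/backup-root", conf); // illustrative root
      System.out.println(fs.getUri());
    }
  }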

[22/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html
index eb9e252..667152a 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.ProgressCommand.html
(Same generated page content as BackupCommands.html earlier in this digest: identical
import-reordering and class-preamble hunks at @@ -28,22 +28,22 @@ and @@ -70,194 +70,194 @@.)

[06/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/LogRollMasterProcedureManager.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/LogRollMasterProcedureManager.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/LogRollMasterProcedureManager.html
index ef6faa1..dae573d 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/LogRollMasterProcedureManager.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/master/LogRollMasterProcedureManager.html
@@ -32,26 +32,26 @@
The import block is re-sorted (CoordinatedStateManager and ServerName move to the top of
the HBase group; Yetus and SLF4J move below it) and the two shaded-protobuf imports
(HBaseProtos.NameStringPair, HBaseProtos.ProcedureDescription) are split into their own
trailing group.

@@ -65,114 +65,114 @@
One over-long constant declaration is wrapped, shifting the rest of the file. The class
preamble of this "Master procedure manager for coordinated cluster-wide WAL roll
operation", cleaned of markup (the 180000 ms timeout default assumes the truncated digits
were zeros):

  public static final String ROLLLOG_PROCEDURE_NAME = "rolllog";
  public static final String BACKUP_WAKE_MILLIS_KEY = "hbase.backup.logroll.wake.millis";
  public static final String BACKUP_TIMEOUT_MILLIS_KEY = "hbase.backup.logroll.timeout.millis";
  public static final String BACKUP_POOL_THREAD_NUMBER_KEY =
      "hbase.backup.logroll.pool.thread.number";

  public static final int BACKUP_WAKE_MILLIS_DEFAULT = 500;
  public static final int BACKUP_TIMEOUT_MILLIS_DEFAULT = 180000;
  public static final int BACKUP_POOL_THREAD_NUMBER_DEFAULT = 8;

  private MasterServices master;
  private ProcedureCoordinator coordinator;
  private boolean done;

  @Override
  public void stop(String why) {
    LOG.info("stop: " + why);
  }

  @Override
  public boolean isStopped() {
    return false;
  }

  @Override
  public void initialize(MasterServices master, MetricsMaster metricsMaster)
      throws KeeperException, IOException, UnsupportedOperationException {
    this.master = master;
    this.done = false;

    // setup the default procedure coordinator
    String name = master.getServerName().toString();
    ... (message truncated)
[40/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.html
 
b/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.html
index d224a6b..a83f512 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.html
@@ -114,7 +114,7 @@ var activeTableTab = "activeTableTab";
 
 
 @InterfaceAudience.Private
-public class MapReduceBackupMergeJob
+public class MapReduceBackupMergeJob
 extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true;
 title="class or interface in java.lang">Object
 implements BackupMergeJob
 MapReduce implementation of BackupMergeJob
@@ -228,7 +228,7 @@ implements 
 void
 run(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]backupIds)
-Run backup merge operation
+Run backup merge operation.
 
 
 
@@ -277,7 +277,7 @@ implements 
 
 LOG
-public static finalorg.slf4j.Logger LOG
+public static finalorg.slf4j.Logger LOG
 
 
 
@@ -286,7 +286,7 @@ implements 
 
 player
-protectedorg.apache.hadoop.util.Tool player
+protectedorg.apache.hadoop.util.Tool player
 
 
 
@@ -295,7 +295,7 @@ implements 
 
 conf
-protectedorg.apache.hadoop.conf.Configuration conf
+protectedorg.apache.hadoop.conf.Configuration conf
 
 
 
@@ -312,7 +312,7 @@ implements 
 
 MapReduceBackupMergeJob
-publicMapReduceBackupMergeJob()
+publicMapReduceBackupMergeJob()
 
 
 
@@ -329,7 +329,7 @@ implements 
 
 getConf
-publicorg.apache.hadoop.conf.ConfigurationgetConf()
+publicorg.apache.hadoop.conf.ConfigurationgetConf()
 
 Specified by:
 getConfin 
interfaceorg.apache.hadoop.conf.Configurable
@@ -342,7 +342,7 @@ implements 
 
 setConf
-publicvoidsetConf(org.apache.hadoop.conf.Configurationconf)
+publicvoidsetConf(org.apache.hadoop.conf.Configurationconf)
 
 Specified by:
 setConfin 
interfaceorg.apache.hadoop.conf.Configurable
@@ -355,17 +355,17 @@ implements 
 
 run
-publicvoidrun(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]backupIds)
+publicvoidrun(http://docs.oracle.com/javase/8/docs/api/java/lang/String.html?is-external=true;
 title="class or interface in java.lang">String[]backupIds)
  throws http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
 Description copied from 
interface:BackupMergeJob
-Run backup merge operation
+Run backup merge operation.
 
 Specified by:
 runin
 interfaceBackupMergeJob
 Parameters:
 backupIds - backup image ids
 Throws:
-http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException
+http://docs.oracle.com/javase/8/docs/api/java/io/IOException.html?is-external=true;
 title="class or interface in java.io">IOException - if the backup 
merge operation fails
 
 
 
@@ -375,7 +375,7 @@ implements 
 
 toPathList
-protectedhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.PathtoPathList(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListPairTableName,org.apache.hadoop.fs.PathprocessedTableList)
+protectedhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in 
java.util">Listorg.apache.hadoop.fs.PathtoPathList(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListPairTableName,org.apache.hadoop.fs.PathprocessedTableList)
 
 
 
@@ -384,7 +384,7 @@ implements 
 
 toTableNameList
-protectedhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableNametoTableNameList(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListPairTableName,org.apache.hadoop.fs.PathprocessedTableList)
+protectedhttp://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListTableNametoTableNameList(http://docs.oracle.com/javase/8/docs/api/java/util/List.html?is-external=true;
 title="class or interface in java.util">ListPairTableName,org.apache.hadoop.fs.PathprocessedTableList)
 
 
 
@@ -393,7 +393,7 @@ implements 
 
 cleanupBulkLoadDirs
-protectedvoidcleanupBulkLoadDirs(org.apache.hadoop.fs.FileSystemfs,
+protectedvoidcleanupBulkLoadDirs(org.apache.hadoop.fs.FileSystemfs,
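A hedged usage sketch of the interface documented above -- it needs a live cluster with
the backup system tables in place, and the backup ids are made up:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hbase.HBaseConfiguration;
  import org.apache.hadoop.hbase.backup.mapreduce.MapReduceBackupMergeJob;

  public class MergeDemo {
    public static void main(String[] args) throws Exception {
      Configuration conf = HBaseConfiguration.create();
      MapReduceBackupMergeJob job = new MapReduceBackupMergeJob();
      job.setConf(conf); // Configurable, as documented above
      // Merge two backup images; ids are illustrative, taken from earlier create runs.
      job.run(new String[] { "backup_1517100000000", "backup_1517186400000" });
    }
  }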

[44/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManager.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManager.html b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManager.html
index bd348da..4ee0b52 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManager.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManager.html
@@ -297,7 +297,7 @@
The method-summary entry for Iterator<BackupSystemTable.WALItem>
getWALFilesFromBackupSystem() gains a period: "Get WAL files iterator."

The remaining hunks (at -476 through -627) are anchor renumbering only. The affected
members, with the link markup stripped:

  protected BackupInfo getBackupInfo()
      Returns backup info
  public static void decorateMasterConfiguration(Configuration conf)
      This method modifies the master's configuration in order to inject backup-related
      features (TESTs only)
  public static void decorateRegionServerConfiguration(Configuration conf)
      This method modifies the Region Server configuration in order to inject
      backup-related features, TESTs only.
  public static boolean isBackupEnabled(Configuration conf)
  Configuration getConf()
      Get configuration
  public void close()
      Stop all the work of backup. (Specified by close in interface Closeable)
  public BackupInfo createBackupInfo(String backupId, BackupType type,
      List<TableName> tableList, String targetRootDir, ...)
  private String getOngoingBackupId() throws IOException
      Check if any ongoing backup. Currently, we only rely on checking status in backup
      system table. We need to consider handling the case of orphan records in the future.
      Otherwise, all ...
  public void initialize() throws IOException
      Start the backup manager service.
  public void setBackupInfo(BackupInfo backupInfo)
  getAncestors (message truncated)
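A hedged sketch of opening a session through createBackupInfo (all values illustrative;
the trailing workers/bandwidth parameters are an assumption, since the signature above is
cut off after targetRootDir):

  import java.util.Arrays;
  import java.util.List;
  import org.apache.hadoop.hbase.TableName;
  import org.apache.hadoop.hbase.backup.BackupInfo;
  import org.apache.hadoop.hbase.backup.BackupType;
  import org.apache.hadoop.hbase.backup.impl.BackupManager;

  public class CreateInfoDemo {
    static BackupInfo newFullBackup(BackupManager mgr) throws java.io.IOException {
      List<TableName> tables = Arrays.asList(TableName.valueOf("usertable"));
      return mgr.createBackupInfo(
          "backup_1517100000000",          // backupId (made up)
          BackupType.FULL,                 // type
          tables,                          // tableList
          "hdfs://backup-nn/backup-root",  // targetRootDir
          4,                               // workers (assumed parameter)
          0L);                             // bandwidth (assumed parameter)
    }
  }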

[07/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.html
--
diff --git a/devapidocs/src-html/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.html b/devapidocs/src-html/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.html
index e96923b..67775bf 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/mapreduce/MapReduceBackupMergeJob.html
@@ -46,287 +46,272 @@
The import block is re-sorted (Yetus and SLF4J now follow the HBase client/util imports)
and a stray blank line after the class javadoc is dropped; the rest of the hunk is
re-wrapping. The class body shown, cleaned of markup (the message truncates inside run()):

  /**
   * MapReduce implementation of {@link BackupMergeJob}
   * Must be initialized with configuration of a backup destination cluster
   */
  @InterfaceAudience.Private
  public class MapReduceBackupMergeJob implements BackupMergeJob {
    public static final Logger LOG = LoggerFactory.getLogger(MapReduceBackupMergeJob.class);

    protected Tool player;
    protected Configuration conf;

    public MapReduceBackupMergeJob() {
    }

    @Override
    public Configuration getConf() {
      return conf;
    }

    @Override
    public void setConf(Configuration conf) {
      this.conf = conf;
    }

    @Override
    public void run(String[] backupIds) throws IOException {
      String bulkOutputConfKey;

      // TODO : run player on remote cluster
      player = new MapReduceHFileSplitterJob();
      bulkOutputConfKey = MapReduceHFileSplitterJob.BULK_OUTPUT_CONF_KEY;
      // Player reads all files in arbitrary directory structure and creates
      // a Map task for each file
      String bids = StringUtils.join(backupIds, ",");

      if (LOG.isDebugEnabled()) {
        LOG.debug("Merge backup images " + bids);
      }

      List<Pair<TableName, Path>> processedTableList = new ArrayList<Pair<TableName, Path>>();
      boolean finishedTables = false;
      Connection conn = ConnectionFactory.createConnection(getConf());
      BackupSystemTable table = new BackupSystemTable(conn);
      FileSystem fs = FileSystem.get(getConf());

      try {
        // Get exclusive lock on backup system
        table.startBackupExclusiveOperation();
        // Start merge operation
        table.startMergeOperation(backupIds);

        // Select most recent backup id
        String mergedBackupId = findMostRecentBackupId(backupIds);

        TableName[] tableNames = getTableNamesInBackupImages(backupIds);
        String backupRoot = null;

        BackupInfo bInfo = table.readBackupInfo(backupIds[0]);
        backupRoot = bInfo.getBackupRootDir();

        for (int i = 0; i < tableNames.length; i++) {
          LOG.info("Merge backup images for " + tableNames[i]);

          // Find input directories for table
          Path[] dirPaths = findInputDirectories(fs, backupRoot, tableNames[i], backupIds);
          String dirs = StringUtils.join(dirPaths, ",");
          Path bulkOutputPath = BackupUtils.getBulkOutputDir(
              BackupUtils.getFileNameCompatibleString(tableNames[i]), getConf(), false);
          // Delete content if exists
          if (fs.exists(bulkOutputPath)) {
            if (!fs.delete(bulkOutputPath, true)) {
              LOG.warn("Can not delete: " + bulkOutputPath);
            }
          }
          Configuration conf = getConf();
          conf.set(bulkOutputConfKey, bulkOutputPath.toString());
          String[] playerArgs = { dirs, tableNames[i].getNameAsString() };

          int result = 0;

          player.setConf(getConf());
          result = player.run(playerArgs);
          if (!succeeded(result)) {
            throw new IOException("Can not merge backup images for " + dirs
                + " (check Hadoop/MR and HBase logs). Player return code =" + result);
          }
          // Add to processed table list
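findMostRecentBackupId itself is not shown in this message; a sketch of what it amounts
to, under the assumption that backup ids end in a millisecond timestamp after the final
underscore ("backup_<ts>"):

  public class MostRecentIdDemo {
    static String findMostRecentBackupId(String[] backupIds) {
      String best = backupIds[0];
      for (String id : backupIds) {
        if (suffix(id) > suffix(best)) {
          best = id;
        }
      }
      return best;
    }

    // Parse the numeric suffix after the last '_'.
    private static long suffix(String id) {
      return Long.parseLong(id.substring(id.lastIndexOf('_') + 1));
    }

    public static void main(String[] args) {
      System.out.println(findMostRecentBackupId(
          new String[] { "backup_1517100000000", "backup_1517186400000" }));
      // -> backup_1517186400000
    }
  }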

[15/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
index 3f74159..3445980 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.WALItem.html
@@ -97,809 +97,809 @@
 089 * value = backupId and full WAL file 
name/li
 090 * /ul/p
 091 */
-092
-093@InterfaceAudience.Private
-094public final class BackupSystemTable 
implements Closeable {
-095  private static final Logger LOG = 
LoggerFactory.getLogger(BackupSystemTable.class);
-096
-097  static class WALItem {
-098String backupId;
-099String walFile;
-100String backupRoot;
-101
-102WALItem(String backupId, String 
walFile, String backupRoot) {
-103  this.backupId = backupId;
-104  this.walFile = walFile;
-105  this.backupRoot = backupRoot;
-106}
-107
-108public String getBackupId() {
-109  return backupId;
-110}
-111
-112public String getWalFile() {
-113  return walFile;
-114}
-115
-116public String getBackupRoot() {
-117  return backupRoot;
-118}
-119
-120@Override
-121public String toString() {
-122  return Path.SEPARATOR + backupRoot 
+ Path.SEPARATOR + backupId + Path.SEPARATOR + walFile;
-123}
+092@InterfaceAudience.Private
+093public final class BackupSystemTable 
implements Closeable {
+094  private static final Logger LOG = 
LoggerFactory.getLogger(BackupSystemTable.class);
+095
+096  static class WALItem {
+097String backupId;
+098String walFile;
+099String backupRoot;
+100
+101WALItem(String backupId, String 
walFile, String backupRoot) {
+102  this.backupId = backupId;
+103  this.walFile = walFile;
+104  this.backupRoot = backupRoot;
+105}
+106
+107public String getBackupId() {
+108  return backupId;
+109}
+110
+111public String getWalFile() {
+112  return walFile;
+113}
+114
+115public String getBackupRoot() {
+116  return backupRoot;
+117}
+118
+119@Override
+120public String toString() {
+121  return Path.SEPARATOR + backupRoot 
+ Path.SEPARATOR + backupId + Path.SEPARATOR + walFile;
+122}
+123  }
 124
-125  }
-126
-127  /**
-128   * Backup system table (main) name
-129   */
-130  private TableName tableName;
-131
-132  /**
-133   * Backup System table name for bulk 
loaded files.
-134   * We keep all bulk loaded file 
references in a separate table
-135   * because we have to isolate general 
backup operations: create, merge etc
-136   * from activity of RegionObserver, 
which controls process of a bulk loading
-137   * {@link 
org.apache.hadoop.hbase.backup.BackupObserver}
-138   */
-139
-140  private TableName bulkLoadTableName;
-141
-142  /**
-143   * Stores backup sessions (contexts)
-144   */
-145  final static byte[] SESSIONS_FAMILY = 
"session".getBytes();
-146  /**
-147   * Stores other meta
-148   */
-149  final static byte[] META_FAMILY = 
"meta".getBytes();
-150  final static byte[] BULK_LOAD_FAMILY = 
"bulk".getBytes();
-151  /**
-152   * Connection to HBase cluster, shared 
among all instances
-153   */
-154  private final Connection connection;
-155
-156  private final static String 
BACKUP_INFO_PREFIX = "session:";
-157  private final static String 
START_CODE_ROW = "startcode:";
-158  private final static byte[] 
ACTIVE_SESSION_ROW = "activesession:".getBytes();
-159  private final static byte[] 
ACTIVE_SESSION_COL = "c".getBytes();
+125  /**
+126   * Backup system table (main) name
+127   */
+128  private TableName tableName;
+129
+130  /**
+131   * Backup System table name for bulk 
loaded files.
+132   * We keep all bulk loaded file 
references in a separate table
+133   * because we have to isolate general 
backup operations: create, merge etc
+134   * from activity of RegionObserver, 
which controls process of a bulk loading
+135   * {@link 
org.apache.hadoop.hbase.backup.BackupObserver}
+136   */
+137  private TableName bulkLoadTableName;
+138
+139  /**
+140   * Stores backup sessions (contexts)
+141   */
+142  final static byte[] SESSIONS_FAMILY = 
"session".getBytes();
+143  /**
+144   * Stores other meta
+145   */
+146  final static byte[] META_FAMILY = 
"meta".getBytes();
+147  final static byte[] BULK_LOAD_FAMILY = 
"bulk".getBytes();
+148  /**
+149   * Connection to HBase cluster, shared 
among all instances
+150   */
+151  private final Connection connection;
+152
+153  private final static String 
BACKUP_INFO_PREFIX = "session:";
+154  private final static String 
START_CODE_ROW = "startcode:";
+155  private final static byte[] 
ACTIVE_SESSION_ROW = 
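The WALItem class in the diff above documents the row layout used for WAL tracking: toString() joins backupRoot, backupId and walFile with Path.SEPARATOR. A minimal standalone sketch of that key composition (the sample values are hypothetical, not taken from the patch):

import org.apache.hadoop.fs.Path;

public class WalItemKeyDemo {
  // Mirrors BackupSystemTable.WALItem.toString(): "/<backupRoot>/<backupId>/<walFile>"
  static String walKey(String backupRoot, String backupId, String walFile) {
    return Path.SEPARATOR + backupRoot + Path.SEPARATOR + backupId + Path.SEPARATOR + walFile;
  }

  public static void main(String[] args) {
    // Hypothetical values, only to show the shape of the composed key.
    System.out.println(walKey("backupUT", "backup_1517123456789", "wal.1517120000000"));
  }
}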

[47/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
 
b/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
index 31fd7ec..7315fa4 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/backup/BackupRestoreConstants.BackupCommand.html
@@ -122,7 +122,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-public static enum BackupRestoreConstants.BackupCommand
+public static enum BackupRestoreConstants.BackupCommand
 extends Enum<BackupRestoreConstants.BackupCommand>
 
 
@@ -261,7 +261,7 @@ the order they are declared.
 
 
 CREATE
-public static final BackupRestoreConstants.BackupCommand CREATE
+public static final BackupRestoreConstants.BackupCommand CREATE
 
 
 
@@ -270,7 +270,7 @@ the order they are declared.
 
 
 CANCEL
-public static final BackupRestoreConstants.BackupCommand CANCEL
+public static final BackupRestoreConstants.BackupCommand CANCEL
 
 
 
@@ -279,7 +279,7 @@ the order they are declared.
 
 
 DELETE
-public static final BackupRestoreConstants.BackupCommand DELETE
+public static final BackupRestoreConstants.BackupCommand DELETE
 
 
 
@@ -288,7 +288,7 @@ the order they are declared.
 
 
 DESCRIBE
-public static final BackupRestoreConstants.BackupCommand DESCRIBE
+public static final BackupRestoreConstants.BackupCommand DESCRIBE
 
 
 
@@ -297,7 +297,7 @@ the order they are declared.
 
 
 HISTORY
-public static final BackupRestoreConstants.BackupCommand HISTORY
+public static final BackupRestoreConstants.BackupCommand HISTORY
 
 
 
@@ -306,7 +306,7 @@ the order they are declared.
 
 
 STATUS
-public static final BackupRestoreConstants.BackupCommand STATUS
+public static final BackupRestoreConstants.BackupCommand STATUS
 
 
 
@@ -315,7 +315,7 @@ the order they are declared.
 
 
 CONVERT
-public static final BackupRestoreConstants.BackupCommand CONVERT
+public static final BackupRestoreConstants.BackupCommand CONVERT
 
 
 
@@ -324,7 +324,7 @@ the order they are declared.
 
 
 MERGE
-public static final BackupRestoreConstants.BackupCommand MERGE
+public static final BackupRestoreConstants.BackupCommand MERGE
 
 
 
@@ -333,7 +333,7 @@ the order they are declared.
 
 
 STOP
-public static final BackupRestoreConstants.BackupCommand STOP
+public static final BackupRestoreConstants.BackupCommand STOP
 
 
 
@@ -342,7 +342,7 @@ the order they are declared.
 
 
 SHOW
-public static final BackupRestoreConstants.BackupCommand SHOW
+public static final BackupRestoreConstants.BackupCommand SHOW
 
 
 
@@ -351,7 +351,7 @@ the order they are declared.
 
 
 HELP
-public static final BackupRestoreConstants.BackupCommand HELP
+public static final BackupRestoreConstants.BackupCommand HELP
 
 
 
@@ -360,7 +360,7 @@ the order they are declared.
 
 
 PROGRESS
-public static final BackupRestoreConstants.BackupCommand PROGRESS
+public static final BackupRestoreConstants.BackupCommand PROGRESS
 
 
 
@@ -369,7 +369,7 @@ the order they are declared.
 
 
 SET
-public static final BackupRestoreConstants.BackupCommand SET
+public static final BackupRestoreConstants.BackupCommand SET
 
 
 
@@ -378,7 +378,7 @@ the order they are declared.
 
 
 SET_ADD
-public static final BackupRestoreConstants.BackupCommand SET_ADD
+public static final BackupRestoreConstants.BackupCommand SET_ADD
 
 
 
@@ -387,7 +387,7 @@ the order they are declared.
 
 
 SET_REMOVE
-public static final BackupRestoreConstants.BackupCommand SET_REMOVE
+public static final BackupRestoreConstants.BackupCommand SET_REMOVE
 
 
 
@@ -396,7 +396,7 @@ the order they are declared.
 
 
 SET_DELETE
-public static final BackupRestoreConstants.BackupCommand SET_DELETE
+public static final BackupRestoreConstants.BackupCommand SET_DELETE
 
 
 
@@ -405,7 +405,7 @@ the order they are declared.
 
 
 SET_DESCRIBE
-public static final BackupRestoreConstants.BackupCommand SET_DESCRIBE
+public static final BackupRestoreConstants.BackupCommand SET_DESCRIBE
 
 
 
@@ -414,7 +414,7 @@ the order they are declared.
 
 
 SET_LIST
-public static final BackupRestoreConstants.BackupCommand SET_LIST
+public static final BackupRestoreConstants.BackupCommand SET_LIST
 
 
 
@@ -423,7 +423,7 @@ the order they are declared.
 
 
 REPAIR
-public static final BackupRestoreConstants.BackupCommand REPAIR
+public static final BackupRestoreConstants.BackupCommand REPAIR
 
 
 
@@ -440,7 +440,7 @@ the order they are declared.
 
 
 values
-public static BackupRestoreConstants.BackupCommand[] values()
+public static BackupRestoreConstants.BackupCommand[] values()
 Returns an array containing the constants of this enum 
type, in
 the order they are declared.  This 
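The hunks above are visually identical on both sides; the underlying change is only in generated HTML anchors. For reference, a minimal sketch of the standard enum machinery these docs describe: values() returns the constants in declaration order and valueOf() resolves one by exact name:

import org.apache.hadoop.hbase.backup.BackupRestoreConstants;

public class BackupCommandDemo {
  public static void main(String[] args) {
    // Declaration order: CREATE, CANCEL, DELETE, DESCRIBE, HISTORY, ...
    for (BackupRestoreConstants.BackupCommand cmd
        : BackupRestoreConstants.BackupCommand.values()) {
      System.out.println(cmd.name());
    }
    // valueOf() is the usual enum lookup by exact constant name.
    BackupRestoreConstants.BackupCommand c =
        BackupRestoreConstants.BackupCommand.valueOf("CREATE");
    System.out.println(c == BackupRestoreConstants.BackupCommand.CREATE); // true
  }
}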

[24/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
index eb9e252..667152a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.HistoryCommand.html
@@ -28,22 +28,22 @@
 020
 021import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
 022import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
-029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
-033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
+031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
+035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
 039
 040import java.io.IOException;
 041import java.net.URI;
@@ -70,194 +70,194 @@
 062import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 063import 
org.apache.hadoop.hbase.client.Connection;
 064import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-065import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-066import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-067import 
org.apache.yetus.audience.InterfaceAudience;
-068
-069/**
-070 * General backup commands, options and 
usage messages
-071 */
-072
+065import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+066import 
org.apache.yetus.audience.InterfaceAudience;
+067
+068import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+069
+070/**
+071 * General backup commands, options and 
usage messages
+072 */
 073@InterfaceAudience.Private
 074public final class BackupCommands {
-075
-076  public final static String 
INCORRECT_USAGE = "Incorrect usage";
-077
-078  public final static String 
TOP_LEVEL_NOT_ALLOWED =
-079  "Top level (root) folder is not 
allowed to be a backup destination";
-080
-081  public static final String USAGE = 
"Usage: 

[31/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
index 62efc6b..12f4f50 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupAdminImpl.html
@@ -52,61 +52,61 @@
 044import 
org.apache.hadoop.hbase.backup.RestoreRequest;
 045import 
org.apache.hadoop.hbase.backup.util.BackupSet;
 046import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
-047import 
org.apache.yetus.audience.InterfaceAudience;
-048import org.slf4j.Logger;
-049import org.slf4j.LoggerFactory;
-050import 
org.apache.hadoop.hbase.client.Admin;
-051import 
org.apache.hadoop.hbase.client.Connection;
-052import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-053import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-054
-055@InterfaceAudience.Private
-056public class BackupAdminImpl implements 
BackupAdmin {
-057  public final static String CHECK_OK = 
"Checking backup images: OK";
-058  public final static String CHECK_FAILED 
=
-059  "Checking backup images: Failed. 
Some dependencies are missing for restore";
-060  private static final Logger LOG = 
LoggerFactory.getLogger(BackupAdminImpl.class);
-061
-062  private final Connection conn;
-063
-064  public BackupAdminImpl(Connection conn) 
{
-065this.conn = conn;
-066  }
-067
-068  @Override
-069  public void close() throws IOException 
{
-070  }
-071
-072  @Override
-073  public BackupInfo getBackupInfo(String 
backupId) throws IOException {
-074BackupInfo backupInfo = null;
-075try (final BackupSystemTable table = 
new BackupSystemTable(conn)) {
-076  if (backupId == null) {
-077ArrayList<BackupInfo> recentSessions = table.getBackupInfos(BackupState.RUNNING);
-078if (recentSessions.isEmpty()) {
-079  LOG.warn("No ongoing sessions 
found.");
-080  return null;
-081}
-082// else show status for ongoing 
session
-083// must be one maximum
-084return recentSessions.get(0);
-085  } else {
-086backupInfo = 
table.readBackupInfo(backupId);
-087return backupInfo;
-088  }
-089}
-090  }
-091
-092  @Override
-093  public int deleteBackups(String[] 
backupIds) throws IOException {
-094
-095int totalDeleted = 0;
-096Map<String, HashSet<TableName>> allTablesMap = new HashMap<String, HashSet<TableName>>();
-097
-098boolean deleteSessionStarted = 
false;
-099boolean snapshotDone = false;
-100try (final BackupSystemTable sysTable 
= new BackupSystemTable(conn)) {
-101
+047import 
org.apache.hadoop.hbase.client.Admin;
+048import 
org.apache.hadoop.hbase.client.Connection;
+049import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+050import 
org.apache.yetus.audience.InterfaceAudience;
+051import org.slf4j.Logger;
+052import org.slf4j.LoggerFactory;
+053
+054import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+055
+056@InterfaceAudience.Private
+057public class BackupAdminImpl implements 
BackupAdmin {
+058  public final static String CHECK_OK = 
"Checking backup images: OK";
+059  public final static String CHECK_FAILED 
=
+060  "Checking backup images: Failed. 
Some dependencies are missing for restore";
+061  private static final Logger LOG = 
LoggerFactory.getLogger(BackupAdminImpl.class);
+062
+063  private final Connection conn;
+064
+065  public BackupAdminImpl(Connection conn) 
{
+066this.conn = conn;
+067  }
+068
+069  @Override
+070  public void close() {
+071  }
+072
+073  @Override
+074  public BackupInfo getBackupInfo(String 
backupId) throws IOException {
+075BackupInfo backupInfo;
+076try (final BackupSystemTable table = 
new BackupSystemTable(conn)) {
+077  if (backupId == null) {
+078ArrayList<BackupInfo> recentSessions = table.getBackupInfos(BackupState.RUNNING);
+079if (recentSessions.isEmpty()) {
+080  LOG.warn("No ongoing sessions 
found.");
+081  return null;
+082}
+083// else show status for ongoing 
session
+084// must be one maximum
+085return recentSessions.get(0);
+086  } else {
+087backupInfo = 
table.readBackupInfo(backupId);
+088return backupInfo;
+089  }
+090}
+091  }
+092
+093  @Override
+094  public int deleteBackups(String[] 
backupIds) throws IOException {
+095
+096int totalDeleted = 0;
+097Map<String, HashSet<TableName>> allTablesMap = new HashMap<>();
+098
+099boolean deleteSessionStarted;
+100boolean snapshotDone;
+101try (final BackupSystemTable sysTable 
= new BackupSystemTable(conn)) {
 102  // Step 1: Make sure there is no 
active session
 103 

[30/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
index eb9e252..667152a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
@@ -28,22 +28,22 @@
 020
 021import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
 022import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
-029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
-033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
+031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
+035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
 039
 040import java.io.IOException;
 041import java.net.URI;
@@ -70,194 +70,194 @@
 062import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 063import 
org.apache.hadoop.hbase.client.Connection;
 064import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-065import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-066import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-067import 
org.apache.yetus.audience.InterfaceAudience;
-068
-069/**
-070 * General backup commands, options and 
usage messages
-071 */
-072
+065import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+066import 
org.apache.yetus.audience.InterfaceAudience;
+067
+068import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+069
+070/**
+071 * General backup commands, options and 
usage messages
+072 */
 073@InterfaceAudience.Private
 074public final class BackupCommands {
-075
-076  public final static String 
INCORRECT_USAGE = "Incorrect usage";
-077
-078  public final static String 
TOP_LEVEL_NOT_ALLOWED =
-079  "Top level (root) folder is not 
allowed to be a backup destination";
-080
-081  public static final String USAGE = 

[43/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManifest.html
--
diff --git a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManifest.html 
b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManifest.html
index 0c0918e..484517d 100644
--- a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManifest.html
+++ b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupManifest.html
@@ -328,7 +328,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 LOG
-private static final org.slf4j.Logger LOG
+private static final org.slf4j.Logger LOG
 
 
 
@@ -337,7 +337,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 MANIFEST_FILE_NAME
-public static final String MANIFEST_FILE_NAME
+public static final String MANIFEST_FILE_NAME
 
 See Also:
 Constant
 Field Values
@@ -390,7 +390,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 BackupManifest
-public BackupManifest(BackupInfo backup,
+public BackupManifest(BackupInfo backup,
   TableName table)
 Construct a table level manifest for a backup of the named 
table.
 
@@ -405,7 +405,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 BackupManifest
-public BackupManifest(org.apache.hadoop.conf.Configuration conf,
+public BackupManifest(org.apache.hadoop.conf.Configuration conf,
   org.apache.hadoop.fs.Path backupPath)
throws IOException
 Construct manifest from a backup directory.
@@ -414,7 +414,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 conf - configuration
 backupPath - backup path
 Throws:
-IOException
+IOException - if constructing the manifest from the backup directory fails
 
 
 
@@ -424,7 +424,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 BackupManifest
-public BackupManifest(org.apache.hadoop.fs.FileSystem fs,
+public BackupManifest(org.apache.hadoop.fs.FileSystem fs,
   org.apache.hadoop.fs.Path backupPath)
throws BackupException
 Construct manifest from a backup directory.
@@ -451,7 +451,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getType
-public BackupType getType()
+public BackupType getType()
 
 
 
@@ -460,7 +460,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getTableList
-public List<TableName> getTableList()
+public List<TableName> getTableList()
 Get the table set of this image.
 
 Returns:
@@ -474,7 +474,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 store
-public void store(org.apache.hadoop.conf.Configuration conf)
+public void store(org.apache.hadoop.conf.Configuration conf)
throws BackupException
 TODO: fix it. Persist the manifest file.
 
@@ -490,7 +490,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 getBackupImage
-public BackupManifest.BackupImage getBackupImage()
+public BackupManifest.BackupImage getBackupImage()
 Get this backup image.
 
 Returns:
@@ -504,7 +504,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 addDependentImage
-public void addDependentImage(BackupManifest.BackupImage image)
+public void addDependentImage(BackupManifest.BackupImage image)
 Add dependent backup image for this backup.
 
 Parameters:
@@ -518,7 +518,7 @@ extends http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?
 
 
 setIncrTimestampMap
-public void setIncrTimestampMap(HashMap<TableName, HashMap<String, Long>> 
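A minimal sketch of reading a manifest back with the (Configuration, Path) constructor documented above; the backup directory is hypothetical, and the manifest file (.backup.manifest) is expected inside it:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.backup.impl.BackupManifest;

public class ManifestPeek {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    // Hypothetical backup directory; IOException signals a missing/unreadable manifest.
    Path backupPath = new Path("hdfs://ns1/backupUT/backup_1517123456789");
    BackupManifest manifest = new BackupManifest(conf, backupPath);
    System.out.println(manifest.getType());       // FULL or INCREMENTAL
    System.out.println(manifest.getTableList());  // tables covered by this image
  }
}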

[27/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
index eb9e252..667152a 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupCommands.DeleteCommand.html
@@ -28,22 +28,22 @@
 020
 021import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH;
 022import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_BANDWIDTH_DESC;
-023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
-024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
-025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
-026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
-027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
-028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
-029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
-030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
-031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
-032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
-033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
-034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
-035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
-036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
-037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
-038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+023import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG;
+024import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_DEBUG_DESC;
+025import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH;
+026import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_PATH_DESC;
+027import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER;
+028import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_RECORD_NUMBER_DESC;
+029import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET;
+030import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_BACKUP_DESC;
+031import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_SET_DESC;
+032import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE;
+033import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_DESC;
+034import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_TABLE_LIST_DESC;
+035import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS;
+036import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_WORKERS_DESC;
+037import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME;
+038import static 
org.apache.hadoop.hbase.backup.BackupRestoreConstants.OPTION_YARN_QUEUE_NAME_DESC;
 039
 040import java.io.IOException;
 041import java.net.URI;
@@ -70,194 +70,194 @@
 062import 
org.apache.hadoop.hbase.backup.util.BackupUtils;
 063import 
org.apache.hadoop.hbase.client.Connection;
 064import 
org.apache.hadoop.hbase.client.ConnectionFactory;
-065import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-066import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-067import 
org.apache.yetus.audience.InterfaceAudience;
-068
-069/**
-070 * General backup commands, options and 
usage messages
-071 */
-072
+065import 
org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+066import 
org.apache.yetus.audience.InterfaceAudience;
+067
+068import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+069
+070/**
+071 * General backup commands, options and 
usage messages
+072 */
 073@InterfaceAudience.Private
 074public final class BackupCommands {
-075
-076  public final static String 
INCORRECT_USAGE = "Incorrect usage";
-077
-078  public final static String 
TOP_LEVEL_NOT_ALLOWED =
-079  "Top level (root) folder is not 
allowed to be a backup destination";
-080
-081  public static final String USAGE = 
"Usage: hbase 

[01/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
Repository: hbase-site
Updated Branches:
  refs/heads/asf-site f9958bb15 -> aa7ffc92b


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
--
diff --git 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
index 202fee6..93f49d5 100644
--- 
a/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
+++ 
b/hbase-build-configuration/hbase-archetypes/hbase-shaded-client-project/index.html
@@ -7,7 +7,7 @@
   
 
 
-
+
 
 Apache HBase - Exemplar for hbase-shaded-client archetype  
About
 
@@ -119,7 +119,7 @@
 https://www.apache.org/;>The Apache Software 
Foundation.
 All rights reserved.  
 
-  Last Published: 
2018-01-27
+  Last Published: 
2018-01-28
 
 
 



[18/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.BackupImage.Builder.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.BackupImage.Builder.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.BackupImage.Builder.html
index 7509dcf..ec2aa41 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.BackupImage.Builder.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/BackupManifest.BackupImage.Builder.html
@@ -64,152 +64,152 @@
 056 */
 057@InterfaceAudience.Private
 058public class BackupManifest {
-059
-060  private static final Logger LOG = 
LoggerFactory.getLogger(BackupManifest.class);
-061
-062  // manifest file name
-063  public static final String 
MANIFEST_FILE_NAME = ".backup.manifest";
-064
-065  /**
-066   * Backup image, the dependency graph 
is made up by series of backup images BackupImage contains
-067   * all the relevant information to 
restore the backup and is used during restore operation
-068   */
-069
-070  public static class BackupImage implements Comparable<BackupImage> {
+059  private static final Logger LOG = 
LoggerFactory.getLogger(BackupManifest.class);
+060
+061  // manifest file name
+062  public static final String 
MANIFEST_FILE_NAME = ".backup.manifest";
+063
+064  /**
+065   * Backup image, the dependency graph 
is made up by series of backup images BackupImage contains
+066   * all the relevant information to 
restore the backup and is used during restore operation
+067   */
+068  public static class BackupImage implements Comparable<BackupImage> {
+069static class Builder {
+070  BackupImage image;
 071
-072static class Builder {
-073  BackupImage image;
-074
-075  Builder() {
-076image = new BackupImage();
-077  }
-078
-079  Builder withBackupId(String 
backupId) {
-080image.setBackupId(backupId);
-081return this;
-082  }
-083
-084  Builder withType(BackupType type) 
{
-085image.setType(type);
-086return this;
-087  }
-088
-089  Builder withRootDir(String rootDir) 
{
-090image.setRootDir(rootDir);
-091return this;
-092  }
-093
-094  Builder withTableList(List<TableName> tableList) {
-095image.setTableList(tableList);
-096return this;
-097  }
-098
-099  Builder withStartTime(long 
startTime) {
-100image.setStartTs(startTime);
-101return this;
-102  }
-103
-104  Builder withCompleteTime(long 
completeTime) {
-105
image.setCompleteTs(completeTime);
-106return this;
-107  }
-108
-109  BackupImage build() {
-110return image;
-111  }
-112
-113}
-114
-115private String backupId;
-116private BackupType type;
-117private String rootDir;
-118private List<TableName> tableList;
-119private long startTs;
-120private long completeTs;
-121private ArrayList<BackupImage> ancestors;
-122private HashMap<TableName, HashMap<String, Long>> incrTimeRanges;
-123
-124static Builder newBuilder() {
-125  return new Builder();
-126}
-127
-128public BackupImage() {
-129  super();
-130}
-131
-132private BackupImage(String backupId, 
BackupType type, String rootDir,
-133ListTableName tableList, 
long startTs, long completeTs) {
-134  this.backupId = backupId;
-135  this.type = type;
-136  this.rootDir = rootDir;
-137  this.tableList = tableList;
-138  this.startTs = startTs;
-139  this.completeTs = completeTs;
-140}
-141
-142static BackupImage 
fromProto(BackupProtos.BackupImage im) {
-143  String backupId = 
im.getBackupId();
-144  String rootDir = 
im.getBackupRootDir();
-145  long startTs = im.getStartTs();
-146  long completeTs = 
im.getCompleteTs();
-147  List<HBaseProtos.TableName> tableListList = im.getTableListList();
-148  List<TableName> tableList = new ArrayList<TableName>();
-149  for (HBaseProtos.TableName tn : 
tableListList) {
-150
tableList.add(ProtobufUtil.toTableName(tn));
-151  }
-152
-153  List<BackupProtos.BackupImage> ancestorList = im.getAncestorsList();
-154
-155  BackupType type =
-156  im.getBackupType() == 
BackupProtos.BackupType.FULL ? BackupType.FULL
-157  : BackupType.INCREMENTAL;
-158
-159  BackupImage image = new 
BackupImage(backupId, type, rootDir, tableList, startTs, completeTs);
-160  for (BackupProtos.BackupImage img : 
ancestorList) {
-161
image.addAncestor(fromProto(img));
-162  }
-163  
image.setIncrTimeRanges(loadIncrementalTimestampMap(im));
-164  return image;
-165}
-166
-167BackupProtos.BackupImage toProto() 
{
-168  BackupProtos.BackupImage.Builder 
builder = BackupProtos.BackupImage.newBuilder();
-169 
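A minimal sketch of the Builder flow shown above. Builder and newBuilder() are package-private, so this only compiles inside org.apache.hadoop.hbase.backup.impl; every value below is hypothetical:

package org.apache.hadoop.hbase.backup.impl;

import java.util.Arrays;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.backup.BackupType;

class BackupImageBuilderDemo {
  static BackupManifest.BackupImage sampleImage() {
    // Each with*() setter mutates the wrapped image and returns the Builder,
    // so the calls chain until build() hands the image back.
    return BackupManifest.BackupImage.newBuilder()
        .withBackupId("backup_1517123456789")
        .withType(BackupType.FULL)
        .withRootDir("hdfs://ns1/backupUT")
        .withTableList(Arrays.asList(TableName.valueOf("t1")))
        .withStartTime(1517123456789L)
        .withCompleteTime(1517123999999L)
        .build();
  }
}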

[10/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html
 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html
index 9baf566..460989c 100644
--- 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html
+++ 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/impl/RestoreTablesClient.html
@@ -71,7 +71,7 @@
 063  private String targetRootDir;
 064  private boolean isOverwrite;
 065
-066  public RestoreTablesClient(Connection 
conn, RestoreRequest request) throws IOException {
+066  public RestoreTablesClient(Connection 
conn, RestoreRequest request) {
 067this.targetRootDir = 
request.getBackupRootDir();
 068this.backupId = 
request.getBackupId();
 069this.sTableArray = 
request.getFromTables();
@@ -82,206 +82,197 @@
 074this.isOverwrite = 
request.isOverwrite();
 075this.conn = conn;
 076this.conf = 
conn.getConfiguration();
-077
-078  }
-079
-080  /**
-081   * Validate target tables
-082   * @param conn connection
-083   * @param mgr table state manager
-084   * @param tTableArray: target tables
-085   * @param isOverwrite overwrite 
existing table
-086   * @throws IOException exception
-087   */
-088  private void 
checkTargetTables(TableName[] tTableArray, boolean isOverwrite) throws 
IOException {
-089ArrayList<TableName> existTableList = new ArrayList<>();
-090ArrayList<TableName> disabledTableList = new ArrayList<>();
-091
-092// check if the tables already 
exist
-093try (Admin admin = conn.getAdmin()) 
{
-094  for (TableName tableName : 
tTableArray) {
-095if (admin.tableExists(tableName)) 
{
-096  
existTableList.add(tableName);
-097  if 
(admin.isTableDisabled(tableName)) {
-098
disabledTableList.add(tableName);
-099  }
-100} else {
-101  LOG.info("HBase table " + 
tableName
-102  + " does not exist. It will 
be created during restore process");
-103}
-104  }
-105}
-106
-107if (existTableList.size() > 0) {
-108  if (!isOverwrite) {
-109LOG.error("Existing table (" + 
existTableList
-110+ ") found in the restore 
target, please add "
-111+ "\"-o\" as overwrite option 
in the command if you mean"
-112+ " to restore to these 
existing tables");
-113throw new IOException("Existing 
table found in target while no \"-o\" "
-114+ "as overwrite option 
found");
-115  } else {
-116if (disabledTableList.size() > 0) {
-117  LOG.error("Found offline table 
in the restore target, "
-118  + "please enable them 
before restore with \"-overwrite\" option");
-119  LOG.info("Offline table list in 
restore target: " + disabledTableList);
-120  throw new IOException(
-121  "Found offline table in the 
target when restore with \"-overwrite\" option");
-122}
-123  }
-124}
-125  }
-126
-127  /**
-128   * Restore operation handle each 
backupImage in array
-129   * @param svc: master services
-130   * @param images: array BackupImage
-131   * @param sTable: table to be 
restored
-132   * @param tTable: table to be restored 
to
-133   * @param truncateIfExists: truncate 
table
-134   * @throws IOException exception
-135   */
-136
-137  private void 
restoreImages(BackupImage[] images, TableName sTable, TableName tTable,
-138  boolean truncateIfExists) throws 
IOException {
-139
-140// First image MUST be image of a 
FULL backup
-141BackupImage image = images[0];
-142String rootDir = 
image.getRootDir();
-143String backupId = 
image.getBackupId();
-144Path backupRoot = new 
Path(rootDir);
-145RestoreTool restoreTool = new 
RestoreTool(conf, backupRoot, backupId);
-146Path tableBackupPath = 
HBackupFileSystem.getTableBackupPath(sTable, backupRoot, backupId);
-147String lastIncrBackupId = 
images.length == 1 ? null : images[images.length - 1].getBackupId();
-148// We need hFS only for full restore 
(see the code)
-149BackupManifest manifest = 
HBackupFileSystem.getManifest(conf, backupRoot, backupId);
-150if (manifest.getType() == 
BackupType.FULL) {
-151  LOG.info("Restoring '" + sTable + 
"' to '" + tTable + "' from full" + " backup image "
-152  + 
tableBackupPath.toString());
-153  conf.set(JOB_NAME_CONF_KEY, 
"Full_Restore-" + backupId + "-" + tTable);
-154  restoreTool.fullRestoreTable(conn, 
tableBackupPath, sTable, tTable, truncateIfExists,
-155lastIncrBackupId);
-156  conf.unset(JOB_NAME_CONF_KEY);
-157} else { // incremental Backup
-158  throw new IOException("Unexpected 
backup type " + image.getType());
-159}
-160
-161if (images.length == 1) {
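A minimal sketch restating the checkTargetTables() rule above with the public Admin API: existing target tables require the overwrite flag, and even with overwrite the targets must be online. The error messages mirror the source; the helper class itself is hypothetical:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;

public class TargetTableCheck {
  static void check(Connection conn, TableName[] targets, boolean isOverwrite) throws IOException {
    List<TableName> existing = new ArrayList<>();
    List<TableName> disabled = new ArrayList<>();
    try (Admin admin = conn.getAdmin()) {
      for (TableName t : targets) {
        if (admin.tableExists(t)) {
          existing.add(t);
          if (admin.isTableDisabled(t)) {
            disabled.add(t);
          }
        }
      }
    }
    if (!existing.isEmpty() && !isOverwrite) {
      throw new IOException("Existing table found in target while no \"-o\" as overwrite option found");
    }
    if (!disabled.isEmpty()) {
      throw new IOException("Found offline table in the target when restore with \"-overwrite\" option");
    }
  }
}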

[45/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
--
diff --git 
a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
 
b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
index 274c934..3cf21ac 100644
--- 
a/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
+++ 
b/devapidocs/org/apache/hadoop/hbase/backup/impl/BackupCommands.BackupSetCommand.html
@@ -127,7 +127,7 @@ var activeTableTab = "activeTableTab";
 
 
 
-private static class BackupCommands.BackupSetCommand
+private static class BackupCommands.BackupSetCommand
 extends BackupCommands.Command
 
 
@@ -285,7 +285,7 @@ extends 
 
 SET_ADD_CMD
-private static final String SET_ADD_CMD
+private static final String SET_ADD_CMD
 
 See Also:
 Constant
 Field Values
@@ -298,7 +298,7 @@ extends 
 
 SET_REMOVE_CMD
-private static final String SET_REMOVE_CMD
+private static final String SET_REMOVE_CMD
 
 See Also:
 Constant
 Field Values
@@ -311,7 +311,7 @@ extends 
 
 SET_DELETE_CMD
-private static final String SET_DELETE_CMD
+private static final String SET_DELETE_CMD
 
 See Also:
 Constant
 Field Values
@@ -324,7 +324,7 @@ extends 
 
 SET_DESCRIBE_CMD
-private static final String SET_DESCRIBE_CMD
+private static final String SET_DESCRIBE_CMD
 
 See Also:
 Constant
 Field Values
@@ -337,7 +337,7 @@ extends 
 
 SET_LIST_CMD
-private static final String SET_LIST_CMD
+private static final String SET_LIST_CMD
 
 See Also:
 Constant
 Field Values
@@ -358,7 +358,7 @@ extends 
 
 BackupSetCommand
-BackupSetCommand(org.apache.hadoop.conf.Configuration conf,
+BackupSetCommand(org.apache.hadoop.conf.Configuration conf,
  org.apache.commons.cli.CommandLine cmdline)
 
 
@@ -376,7 +376,7 @@ extends 
 
 execute
-public void execute()
+public void execute()
  throws IOException
 
 Overrides:
@@ -392,7 +392,7 @@ extends 
 
 processSetList
-private void processSetList(String[] args)
+private void processSetList(String[] args)
  throws IOException
 
 Throws:
@@ -406,7 +406,7 @@ extends 
 
 processSetDescribe
-private void processSetDescribe(String[] args)
+private void processSetDescribe(String[] args)
  throws IOException
 
 Throws:
@@ -420,7 +420,7 @@ extends 
 
 processSetDelete
-private void processSetDelete(String[] args)
+private void processSetDelete(String[] args)
 throws IOException

[04/51] [partial] hbase-site git commit: Published site at .

2018-01-28 Thread git-site-role
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/aa7ffc92/devapidocs/src-html/org/apache/hadoop/hbase/backup/util/BackupUtils.html
--
diff --git 
a/devapidocs/src-html/org/apache/hadoop/hbase/backup/util/BackupUtils.html 
b/devapidocs/src-html/org/apache/hadoop/hbase/backup/util/BackupUtils.html
index 252efa5..c2de678 100644
--- a/devapidocs/src-html/org/apache/hadoop/hbase/backup/util/BackupUtils.html
+++ b/devapidocs/src-html/org/apache/hadoop/hbase/backup/util/BackupUtils.html
@@ -92,667 +92,663 @@
 084   */
 085  public static HashMap<String, Long> getRSLogTimestampMins(
 086  HashMap<TableName, HashMap<String, Long>> rsLogTimestampMap) {
-087
-088if (rsLogTimestampMap == null || 
rsLogTimestampMap.isEmpty()) {
-089  return null;
-090}
-091
-092HashMap<String, Long> rsLogTimestampMins = new HashMap<String, Long>();
-093HashMap<String, HashMap<TableName, Long>> rsLogTimestampMapByRS =
-094new HashMap<String, HashMap<TableName, Long>>();
-095
-096for (Entry<TableName, HashMap<String, Long>> tableEntry : rsLogTimestampMap.entrySet()) {
-097  TableName table = 
tableEntry.getKey();
-098  HashMap<String, Long> rsLogTimestamp = tableEntry.getValue();
-099  for (Entry<String, Long> rsEntry : rsLogTimestamp.entrySet()) {
-100String rs = rsEntry.getKey();
-101Long ts = rsEntry.getValue();
-102if 
(!rsLogTimestampMapByRS.containsKey(rs)) {
-103  rsLogTimestampMapByRS.put(rs, new HashMap<TableName, Long>());
+087if (rsLogTimestampMap == null || 
rsLogTimestampMap.isEmpty()) {
+088  return null;
+089}
+090
+091HashMap<String, Long> rsLogTimestampMins = new HashMap<>();
+092HashMap<String, HashMap<TableName, Long>> rsLogTimestampMapByRS = new HashMap<>();
+093
+094for (Entry<TableName, HashMap<String, Long>> tableEntry : rsLogTimestampMap.entrySet()) {
+095  TableName table = 
tableEntry.getKey();
+096  HashMap<String, Long> rsLogTimestamp = tableEntry.getValue();
+097  for (Entry<String, Long> rsEntry : rsLogTimestamp.entrySet()) {
+098String rs = rsEntry.getKey();
+099Long ts = rsEntry.getValue();
+100if 
(!rsLogTimestampMapByRS.containsKey(rs)) {
+101  rsLogTimestampMapByRS.put(rs, new HashMap<>());
+102  
rsLogTimestampMapByRS.get(rs).put(table, ts);
+103} else {
 104  
rsLogTimestampMapByRS.get(rs).put(table, ts);
-105} else {
-106  
rsLogTimestampMapByRS.get(rs).put(table, ts);
-107}
-108  }
-109}
-110
-111for (Entry<String, HashMap<TableName, Long>> entry : rsLogTimestampMapByRS.entrySet()) {
-112  String rs = entry.getKey();
-113  rsLogTimestampMins.put(rs, 
BackupUtils.getMinValue(entry.getValue()));
-114}
-115
-116return rsLogTimestampMins;
-117  }
-118
-119  /**
-120   * copy out Table RegionInfo into 
incremental backup image need to consider move this logic into
-121   * HBackupFileSystem
-122   * @param conn connection
-123   * @param backupInfo backup info
-124   * @param conf configuration
-125   * @throws IOException exception
-126   * @throws InterruptedException 
exception
-127   */
-128  public static void
-129  copyTableRegionInfo(Connection 
conn, BackupInfo backupInfo, Configuration conf)
-130  throws IOException, 
InterruptedException {
-131Path rootDir = 
FSUtils.getRootDir(conf);
-132FileSystem fs = 
rootDir.getFileSystem(conf);
-133
-134// for each table in the table set, 
copy out the table info and region
-135// info files in the correct 
directory structure
-136for (TableName table : 
backupInfo.getTables()) {
-137
-138  if 
(!MetaTableAccessor.tableExists(conn, table)) {
-139LOG.warn("Table " + table + " 
does not exists, skipping it.");
-140continue;
-141  }
-142  TableDescriptor orig = 
FSTableDescriptors.getTableDescriptorFromFs(fs, rootDir, table);
-143
-144  // write a copy of descriptor to 
the target directory
-145  Path target = new 
Path(backupInfo.getTableBackupDir(table));
-146  FileSystem targetFs = 
target.getFileSystem(conf);
-147  FSTableDescriptors descriptors =
-148  new FSTableDescriptors(conf, 
targetFs, FSUtils.getRootDir(conf));
-149  
descriptors.createTableDescriptorForTableDirectory(target, orig, false);
-150  LOG.debug("Attempting to copy table 
info for:" + table + " target: " + target
-151  + " descriptor: " + orig);
-152  LOG.debug("Finished copying 
tableinfo.");
-153  List<RegionInfo> regions = null;
-154  regions = 
MetaTableAccessor.getTableRegions(conn, table);
-155  // For each region, write the 
region info to disk
-156  LOG.debug("Starting to write region 
info for table " + table);
-157  for (RegionInfo regionInfo : 
regions) {
-158Path regionDir =
-159HRegion.getRegionDir(new 
Path(backupInfo.getTableBackupDir(table)), regionInfo);
-160

hbase git commit: HBASE-19869 TestLockManager now uses the right class for logging

2018-01-28 Thread janh
Repository: hbase
Updated Branches:
  refs/heads/branch-2 b355af0fa -> 380169eaf


HBASE-19869 TestLockManager now uses the right class for logging


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/380169ea
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/380169ea
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/380169ea

Branch: refs/heads/branch-2
Commit: 380169eaf24f5a5d660aed0fae886e7783968a41
Parents: b355af0
Author: Jan Hentschel 
Authored: Fri Jan 26 21:04:33 2018 +0100
Committer: Jan Hentschel 
Committed: Sun Jan 28 14:04:46 2018 +0100

--
 .../org/apache/hadoop/hbase/master/locking/TestLockManager.java| 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/380169ea/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
index 80a9b7b..908504e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
@@ -55,7 +55,7 @@ public class TestLockManager {
   // crank this up if this test turns out to be flaky.
   private static final int LOCAL_LOCKS_TIMEOUT = 1000;
 
-  private static final Logger LOG = 
LoggerFactory.getLogger(TestLockProcedure.class);
+  private static final Logger LOG = 
LoggerFactory.getLogger(TestLockManager.class);
   protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
   private static MasterServices masterServices;
 



hbase git commit: HBASE-19869 TestLockManager now uses the right class for logging

2018-01-28 Thread janh
Repository: hbase
Updated Branches:
  refs/heads/master c2236b77c -> 851e17987


HBASE-19869 TestLockManager now uses the right class for logging


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/851e1798
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/851e1798
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/851e1798

Branch: refs/heads/master
Commit: 851e17987d2d5edc65b0c874d8bfdd6e657e7945
Parents: c2236b7
Author: Jan Hentschel 
Authored: Fri Jan 26 21:04:33 2018 +0100
Committer: Jan Hentschel 
Committed: Sun Jan 28 13:54:57 2018 +0100

--
 .../org/apache/hadoop/hbase/master/locking/TestLockManager.java| 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/851e1798/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
index 80a9b7b..908504e 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/locking/TestLockManager.java
@@ -55,7 +55,7 @@ public class TestLockManager {
   // crank this up if this test turns out to be flaky.
   private static final int LOCAL_LOCKS_TIMEOUT = 1000;
 
-  private static final Logger LOG = 
LoggerFactory.getLogger(TestLockProcedure.class);
+  private static final Logger LOG = 
LoggerFactory.getLogger(TestLockManager.class);
   protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
   private static MasterServices masterServices;
 

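For reference, a minimal sketch of the logger-per-class idiom this fix restores: pointing getLogger() at the enclosing class keeps log output attributed to the right test. The demo class name is hypothetical:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerIdiomDemo {
  // Pass the enclosing class itself, not a neighbouring one, so log lines
  // carry this class's name rather than (say) TestLockProcedure's.
  private static final Logger LOG = LoggerFactory.getLogger(LoggerIdiomDemo.class);

  public static void main(String[] args) {
    LOG.info("logger name: {}", LOG.getName()); // prints this class's fully qualified name
  }
}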


[1/3] hbase git commit: HBASE-19765 Fixed Checkstyle errors in hbase-backup

2018-01-28 Thread janh
Repository: hbase
Updated Branches:
  refs/heads/master f1502a3ac -> c2236b77c


http://git-wip-us.apache.org/repos/asf/hbase/blob/c2236b77/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
--
diff --git 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
index 9a2825e..18548f5 100644
--- 
a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
+++ 
b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
@@ -84,14 +84,12 @@ public final class BackupUtils {
*/
  public static HashMap<String, Long> getRSLogTimestampMins(
  HashMap<TableName, HashMap<String, Long>> rsLogTimestampMap) {
-
 if (rsLogTimestampMap == null || rsLogTimestampMap.isEmpty()) {
   return null;
 }
 
-HashMap<String, Long> rsLogTimestampMins = new HashMap<String, Long>();
-HashMap<String, HashMap<TableName, Long>> rsLogTimestampMapByRS =
-new HashMap<String, HashMap<TableName, Long>>();
+HashMap<String, Long> rsLogTimestampMins = new HashMap<>();
+HashMap<String, HashMap<TableName, Long>> rsLogTimestampMapByRS = new HashMap<>();
 
 for (Entry<TableName, HashMap<String, Long>> tableEntry : rsLogTimestampMap.entrySet()) {
   TableName table = tableEntry.getKey();
@@ -100,7 +98,7 @@ public final class BackupUtils {
 String rs = rsEntry.getKey();
 Long ts = rsEntry.getValue();
 if (!rsLogTimestampMapByRS.containsKey(rs)) {
-  rsLogTimestampMapByRS.put(rs, new HashMap<TableName, Long>());
+  rsLogTimestampMapByRS.put(rs, new HashMap<>());
   rsLogTimestampMapByRS.get(rs).put(table, ts);
 } else {
   rsLogTimestampMapByRS.get(rs).put(table, ts);
@@ -123,18 +121,15 @@ public final class BackupUtils {
* @param backupInfo backup info
* @param conf configuration
* @throws IOException exception
-   * @throws InterruptedException exception
*/
-  public static void
-  copyTableRegionInfo(Connection conn, BackupInfo backupInfo, 
Configuration conf)
-  throws IOException, InterruptedException {
+  public static void copyTableRegionInfo(Connection conn, BackupInfo 
backupInfo, Configuration conf)
+  throws IOException {
 Path rootDir = FSUtils.getRootDir(conf);
 FileSystem fs = rootDir.getFileSystem(conf);
 
 // for each table in the table set, copy out the table info and region
 // info files in the correct directory structure
 for (TableName table : backupInfo.getTables()) {
-
   if (!MetaTableAccessor.tableExists(conn, table)) {
 LOG.warn("Table " + table + " does not exists, skipping it.");
 continue;
@@ -150,8 +145,7 @@ public final class BackupUtils {
   LOG.debug("Attempting to copy table info for:" + table + " target: " + 
target
   + " descriptor: " + orig);
   LOG.debug("Finished copying tableinfo.");
-  List<RegionInfo> regions = null;
-  regions = MetaTableAccessor.getTableRegions(conn, table);
+  List<RegionInfo> regions = MetaTableAccessor.getTableRegions(conn, table);
   // For each region, write the region info to disk
   LOG.debug("Starting to write region info for table " + table);
   for (RegionInfo regionInfo : regions) {
@@ -210,10 +204,8 @@
    * Returns WAL file name
    * @param walFileName WAL file name
    * @return WAL file name
-   * @throws IOException exception
-   * @throws IllegalArgumentException exception
    */
-  public static String getUniqueWALFileNamePart(String walFileName) throws IOException {
+  public static String getUniqueWALFileNamePart(String walFileName) {
     return getUniqueWALFileNamePart(new Path(walFileName));
   }
 
@@ -221,9 +213,8 @@
    * Returns WAL file name
    * @param p WAL file path
    * @return WAL file name
-   * @throws IOException exception
    */
-  public static String getUniqueWALFileNamePart(Path p) throws IOException {
+  public static String getUniqueWALFileNamePart(Path p) {
     return p.getName();
   }
 
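The two hunks above, like the copyTableRegionInfo change before them, delete throws clauses (and their @throws javadoc) for checked exceptions the method bodies can never raise; declaring them only forces callers to handle an impossible case. A small before/after sketch under that reading (method and path names are illustrative, not from the patch):

    import java.io.IOException;

    public class ThrowsCleanupExample {
      // Before: declares IOException although nothing in the body can throw
      // it, so every caller needs a pointless try/catch or throws clause.
      static String fileNameBefore(String path) throws IOException {
        return path.substring(path.lastIndexOf('/') + 1);
      }

      // After: the unthrowable checked exception is dropped from the signature.
      static String fileNameAfter(String path) {
        return path.substring(path.lastIndexOf('/') + 1);
      }

      public static void main(String[] args) throws IOException {
        System.out.println(fileNameBefore("/wals/host1/wal.123"));
        System.out.println(fileNameAfter("/wals/host1/wal.123"));
      }
    }
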
@@ -261,27 +252,23 @@
     Path rootDir = FSUtils.getRootDir(c);
     Path logDir = new Path(rootDir, HConstants.HREGION_LOGDIR_NAME);
     Path oldLogDir = new Path(rootDir, HConstants.HREGION_OLDLOGDIR_NAME);
-    List<String> logFiles = new ArrayList<String>();
-
-    PathFilter filter = new PathFilter() {
+    List<String> logFiles = new ArrayList<>();
 
-      @Override
-      public boolean accept(Path p) {
-        try {
-          if (AbstractFSWALProvider.isMetaFile(p)) {
-            return false;
-          }
-          String host = parseHostNameFromLogFile(p);
-          if (host == null) {
-            return false;
-          }
-          Long oldTimestamp = hostTimestampMap.get(host);
-          Long currentLogTS = 
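
The email truncates before the `+` side of this run, so the replacement is not visible; since PathFilter has a single abstract method, the anonymous class is presumably being collapsed into a lambda. A sketch of that idiom under that assumption (the timestamp lookup is simplified, and hadoop-common is assumed on the classpath):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.PathFilter;

    public class PathFilterLambdaExample {
      public static void main(String[] args) {
        Map<String, Long> hostTimestampMap = new HashMap<>();
        hostTimestampMap.put("host1", 100L);

        // PathFilter declares one abstract method, accept(Path), so an
        // anonymous implementation maps one-to-one onto a lambda.
        PathFilter filter = p -> {
          Long oldTimestamp = hostTimestampMap.get(p.getParent().getName());
          return oldTimestamp != null;
        };

        System.out.println(filter.accept(new Path("/wals/host1/wal.1")));
      }
    }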

[3/3] hbase git commit: HBASE-19765 Fixed Checkstyle errors in hbase-backup

2018-01-28 Thread janh
HBASE-19765 Fixed Checkstyle errors in hbase-backup


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c2236b77
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c2236b77
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c2236b77

Branch: refs/heads/master
Commit: c2236b77cb04cf7b92576113a7c9b87b244ff2e4
Parents: f1502a3
Author: Jan Hentschel 
Authored: Sat Jan 27 02:07:52 2018 +0100
Committer: Jan Hentschel 
Committed: Sun Jan 28 13:45:45 2018 +0100

--
 .../hbase/backup/BackupClientFactory.java   |   9 +-
 .../hadoop/hbase/backup/BackupCopyJob.java  |   3 +-
 .../hadoop/hbase/backup/BackupHFileCleaner.java |  36 ++--
 .../apache/hadoop/hbase/backup/BackupInfo.java  |  27 ++-
 .../hadoop/hbase/backup/BackupMergeJob.java |   6 +-
 .../hbase/backup/BackupRestoreConstants.java| 124 ++-
 .../hbase/backup/BackupRestoreFactory.java  |   3 +-
 .../hadoop/hbase/backup/BackupTableInfo.java|   5 +-
 .../hadoop/hbase/backup/HBackupFileSystem.java  |   6 +-
 .../apache/hadoop/hbase/backup/LogUtils.java|   3 +-
 .../hadoop/hbase/backup/RestoreDriver.java  |  22 +-
 .../apache/hadoop/hbase/backup/RestoreJob.java  |   3 +-
 .../hadoop/hbase/backup/RestoreRequest.java |   4 +-
 .../hbase/backup/impl/BackupAdminImpl.java  | 158 ++
 .../hbase/backup/impl/BackupCommands.java   | 205 ++-
 .../hadoop/hbase/backup/impl/BackupManager.java |  28 +--
 .../hbase/backup/impl/BackupManifest.java   |  44 ++--
 .../hbase/backup/impl/BackupSystemTable.java| 146 ++---
 .../backup/impl/FullTableBackupClient.java  |  26 ++-
 .../backup/impl/IncrementalBackupManager.java   |  37 ++--
 .../impl/IncrementalTableBackupClient.java  |  47 ++---
 .../hbase/backup/impl/RestoreTablesClient.java  |  37 ++--
 .../hbase/backup/impl/TableBackupClient.java|  35 ++--
 .../mapreduce/MapReduceBackupMergeJob.java  |  43 ++--
 .../mapreduce/MapReduceHFileSplitterJob.java|   2 +-
 .../backup/mapreduce/MapReduceRestoreJob.java   |  23 +--
 .../hbase/backup/master/BackupLogCleaner.java   |  11 +-
 .../master/LogRollMasterProcedureManager.java   |  20 +-
 .../regionserver/LogRollBackupSubprocedure.java |  13 +-
 .../LogRollBackupSubprocedurePool.java  |   8 +-
 .../LogRollRegionServerProcedureManager.java|   7 +-
 .../hadoop/hbase/backup/util/BackupUtils.java   | 108 +-
 .../hadoop/hbase/backup/util/RestoreTool.java   |  49 ++---
 .../hadoop/hbase/backup/TestBackupBase.java |  54 +++--
 .../hbase/backup/TestBackupBoundaryTests.java   |  19 +-
 .../hadoop/hbase/backup/TestBackupDelete.java   |  12 +-
 .../hbase/backup/TestBackupDeleteRestore.java   |   5 +-
 .../backup/TestBackupDeleteWithFailures.java|  47 ++---
 .../hadoop/hbase/backup/TestBackupDescribe.java |  10 +-
 .../hbase/backup/TestBackupHFileCleaner.java|  12 +-
 .../hbase/backup/TestBackupShowHistory.java |  35 ++--
 .../hbase/backup/TestBackupStatusProgress.java  |   7 +-
 .../hbase/backup/TestBackupSystemTable.java |  16 +-
 .../hadoop/hbase/backup/TestFullBackupSet.java  |  10 +-
 .../hadoop/hbase/backup/TestFullRestore.java|  53 +++--
 .../TestIncrementalBackupMergeWithFailures.java |  33 +--
 .../TestIncrementalBackupWithBulkLoad.java  |  20 +-
 .../hadoop/hbase/backup/TestRemoteBackup.java   |  43 ++--
 .../hadoop/hbase/backup/TestRemoteRestore.java  |   9 +-
 .../backup/TestRepairAfterFailedDelete.java |   5 +-
 .../hbase/backup/TestRestoreBoundaryTests.java  |   9 +-
 .../hbase/backup/TestSystemTableSnapshot.java   |   8 +-
 52 files changed, 766 insertions(+), 939 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c2236b77/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupClientFactory.java
--
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupClientFactory.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupClientFactory.java
index 68e5c11..4c96229 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupClientFactory.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/BackupClientFactory.java
@@ -28,11 +28,12 @@ import org.apache.hadoop.hbase.client.Connection;
 import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Private
-public class BackupClientFactory {
+public final class BackupClientFactory {
+  private BackupClientFactory() {
+  }
 
-  public static TableBackupClient create (Connection conn, String backupId, BackupRequest request)
-    throws IOException
-  {
+  public static TableBackupClient create(Connection conn, String backupId, BackupRequest request)

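The hunk is cut off mid-signature, but its visible part makes BackupClientFactory final and adds a private constructor, the standard remedy for checkstyle's FinalClass and HideUtilityClassConstructor checks on classes that expose only static members. A generic sketch of the pattern under that reading (class and method names are illustrative):

    // A static-only holder: final so it cannot be subclassed, with a
    // private constructor so it cannot be instantiated.
    public final class ClientFactoryExample {
      private ClientFactoryExample() {
      }

      public static String create(String backupId) {
        // A real factory would branch on the request type here.
        return "client-" + backupId;
      }

      public static void main(String[] args) {
        System.out.println(create("backup_2018"));
      }
    }
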
[2/3] hbase git commit: HBASE-19765 Fixed Checkstyle errors in hbase-backup

2018-01-28 Thread janh
http://git-wip-us.apache.org/repos/asf/hbase/blob/c2236b77/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
--
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
index cf34d14..2d6cf26 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
@@ -89,7 +89,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
  * value = backupId and full WAL file name
  * 
  */
-
 @InterfaceAudience.Private
 public final class BackupSystemTable implements Closeable {
   private static final Logger LOG = LoggerFactory.getLogger(BackupSystemTable.class);
@@ -121,7 +120,6 @@ public final class BackupSystemTable implements Closeable {
 public String toString() {
       return Path.SEPARATOR + backupRoot + Path.SEPARATOR + backupId + Path.SEPARATOR + walFile;
 }
-
   }
 
   /**
@@ -136,7 +134,6 @@ public final class BackupSystemTable implements Closeable {
* from activity of RegionObserver, which controls process of a bulk loading
* {@link org.apache.hadoop.hbase.backup.BackupObserver}
*/
-
   private TableName bulkLoadTableName;
 
   /**
@@ -212,7 +209,6 @@ public final class BackupSystemTable implements Closeable {
   }
   waitForSystemTable(admin, tableName);
   waitForSystemTable(admin, bulkLoadTableName);
-
 }
   }
 
@@ -246,7 +242,6 @@ public final class BackupSystemTable implements Closeable {
   }
 }
 LOG.debug("Backup table "+tableName+" exists and available");
-
   }
 
   @Override
@@ -260,7 +255,6 @@ public final class BackupSystemTable implements Closeable {
* @throws IOException exception
*/
   public void updateBackupInfo(BackupInfo info) throws IOException {
-
 if (LOG.isTraceEnabled()) {
      LOG.trace("update backup status in backup system table for: " + info.getBackupId()
          + " set status=" + info.getState());
@@ -356,9 +350,7 @@ public final class BackupSystemTable implements Closeable {
* @param backupId backup id
* @throws IOException exception
*/
-
   public void deleteBackupInfo(String backupId) throws IOException {
-
 if (LOG.isTraceEnabled()) {
   LOG.trace("delete backup status in backup system table for " + backupId);
 }
@@ -447,7 +439,7 @@ public final class BackupSystemTable implements Closeable {
   String fam = null;
   String path = null;
   boolean raw = false;
-  byte[] row = null;
+  byte[] row;
   String region = null;
   for (Cell cell : res.listCells()) {
 row = CellUtil.cloneRow(cell);
@@ -465,19 +457,21 @@ public final class BackupSystemTable implements Closeable {
           byte[] state = CellUtil.cloneValue(cell);
           if (Bytes.equals(BackupSystemTable.BL_PREPARE, state)) {
             raw = true;
-          } else raw = false;
+          } else {
+            raw = false;
+          }
         }
       }
       if (map.get(tTable) == null) {
-        map.put(tTable, new HashMap());
+        map.put(tTable, new HashMap<>());
         tblMap = map.get(tTable);
       }
       if (tblMap.get(region) == null) {
-        tblMap.put(region, new HashMap<String, List<Pair<String, Boolean>>>());
+        tblMap.put(region, new HashMap<>());
       }
       Map<String, List<Pair<String, Boolean>>> famMap = tblMap.get(region);
       if (famMap.get(fam) == null) {
-        famMap.put(fam, new ArrayList<Pair<String, Boolean>>());
+        famMap.put(fam, new ArrayList<>());
       }
       famMap.get(fam).add(new Pair<>(path, raw));
       LOG.debug("found orig " + path + " for " + fam + " of table " + region);
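
The first change in this hunk wraps a single-statement else in braces, which is what checkstyle's NeedBraces rule requires; the remaining changes are further diamond substitutions. A minimal sketch of the braces rule (Bytes.equals from the patch is swapped for java.util.Arrays.equals to keep the example self-contained):

    import java.util.Arrays;

    public class NeedBracesExample {
      static boolean flag(byte[] prepare, byte[] state) {
        boolean raw;
        if (Arrays.equals(prepare, state)) {
          raw = true;
        } else {
          // Braced even for one statement: a later edit cannot silently
          // land outside the branch.
          raw = false;
        }
        return raw;
      }

      public static void main(String[] args) {
        System.out.println(flag(new byte[] { 1 }, new byte[] { 1 }));
      }
    }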
@@ -501,7 +495,11 @@ public final class BackupSystemTable implements Closeable {
     for (int idx = 0; idx < maps.length; idx++) {
       Map<byte[], List<Path>> map = maps[idx];
       TableName tn = sTableList.get(idx);
-      if (map == null) continue;
+
+      if (map == null) {
+        continue;
+      }
+
       for (Map.Entry<byte[], List<Path>> entry : map.entrySet()) {
         byte[] fam = entry.getKey();
         List<Path> paths = entry.getValue();
@@ -524,7 +522,6 @@ public final class BackupSystemTable implements Closeable {
* @param backupId backup id
* @return Current status of backup session or null
*/
-
   public BackupInfo readBackupInfo(String backupId) throws IOException {
     if (LOG.isTraceEnabled()) {
       LOG.trace("read backup status from backup system table for: " + backupId);
@@ -585,7 +582,8 @@ public final class