This is an automated email from the ASF dual-hosted git repository.
zhangduo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/master by this push:
new 998b841 HBASE-22473 Split TestSCP
998b841 is described below
commit 998b8416cabccd3743a4385b219eb1593746da5d
Author: zhangduo <[email protected]>
AuthorDate: Sun May 26 22:33:20 2019 +0800
HBASE-22473 Split TestSCP
---
.../hadoop/hbase/master/procedure/TestSCP.java | 137 ++-------------------
.../procedure/{TestSCP.java => TestSCPBase.java} | 128 ++++++++-----------
.../hbase/master/procedure/TestSCPWithMeta.java | 38 ++++++
.../procedure/TestSCPWithMetaWithReplicas.java | 45 +++++++
...CPWithMetaWithReplicasWithoutZKCoordinated.java | 40 ++++++
.../TestSCPWithMetaWithoutZKCoordinated.java | 40 ++++++
.../master/procedure/TestSCPWithReplicas.java | 52 +-------
.../hbase/master/procedure/TestSCPWithoutMeta.java | 38 ++++++
.../procedure/TestSCPWithoutMetaWithReplicas.java | 45 +++++++
...ithoutMetaWithReplicasWithoutZKCoordinated.java | 42 +++++++
.../TestSCPWithoutMetaWithoutZKCoordinated.java | 40 ++++++
11 files changed, 387 insertions(+), 258 deletions(-)
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCP.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCP.java
index b3fbefc..72b1ef8 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCP.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCP.java
@@ -20,13 +20,8 @@ package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
@@ -35,142 +30,34 @@ import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.assignment.AssignmentTestingUtil;
import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
-import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
-import org.junit.After;
-import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({MasterTests.class, LargeTests.class})
-public class TestSCP {
+@Category({ MasterTests.class, LargeTests.class })
+public class TestSCP extends TestSCPBase {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestSCP.class);
private static final Logger LOG = LoggerFactory.getLogger(TestSCP.class);
- protected HBaseTestingUtility util;
-
- protected void setupConf(Configuration conf) {
- conf.setInt(MasterProcedureConstants.MASTER_PROCEDURE_THREADS, 1);
- conf.set("hbase.balancer.tablesOnMaster", "none");
- conf.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 3);
- conf.setInt(HConstants.HBASE_CLIENT_SERVERSIDE_RETRIES_MULTIPLIER, 3);
- conf.setBoolean("hbase.split.writer.creation.bounded", true);
- conf.setInt("hbase.regionserver.hlog.splitlog.writer.threads", 8);
- conf.setBoolean(HConstants.HBASE_SPLIT_WAL_COORDINATED_BY_ZK, true);
- }
-
- @Before
- public void setup() throws Exception {
- this.util = new HBaseTestingUtility();
- setupConf(this.util.getConfiguration());
- startMiniCluster();
- ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(
- this.util.getHBaseCluster().getMaster().getMasterProcedureExecutor(),
false);
- }
-
- protected void startMiniCluster() throws Exception {
- this.util.startMiniCluster(3);
- }
-
- @After
- public void tearDown() throws Exception {
- MiniHBaseCluster cluster = this.util.getHBaseCluster();
- HMaster master = cluster == null? null: cluster.getMaster();
- if (master != null && master.getMasterProcedureExecutor() != null) {
- ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(
- master.getMasterProcedureExecutor(), false);
- }
- this.util.shutdownMiniCluster();
- }
-
-
@Test
public void testCrashTargetRs() throws Exception {
testRecoveryAndDoubleExecution(false, false);
}
@Test
- public void testRecoveryAndDoubleExecutionOnRsWithMeta() throws Exception {
- testRecoveryAndDoubleExecution(true, true);
- }
-
- @Test
- public void testRecoveryAndDoubleExecutionOnRsWithoutMeta() throws Exception
{
- testRecoveryAndDoubleExecution(false, true);
- }
-
- private long getSCPProcId(ProcedureExecutor<?> procExec) {
- util.waitFor(30000, () -> !procExec.getProcedures().isEmpty());
- return
procExec.getActiveProcIds().stream().mapToLong(Long::longValue).min().getAsLong();
- }
-
- /**
- * Run server crash procedure steps twice to test idempotency and that we
are persisting all
- * needed state.
- */
- private void testRecoveryAndDoubleExecution(boolean carryingMeta, boolean
doubleExecution)
- throws Exception {
- final TableName tableName =
TableName.valueOf("testRecoveryAndDoubleExecution-carryingMeta-"
- + carryingMeta + "-doubleExecution-" + doubleExecution);
- try (Table t = createTable(tableName)) {
- // Load the table with a bit of data so some logs to split and some
edits in each region.
- this.util.loadTable(t, HBaseTestingUtility.COLUMNS[0]);
- final int count = util.countRows(t);
- assertTrue("expected some rows", count > 0);
- final String checksum = util.checksumRows(t);
- // Run the procedure executor outside the master so we can mess with it.
Need to disable
- // Master's running of the server crash processing.
- final HMaster master = this.util.getHBaseCluster().getMaster();
- final ProcedureExecutor<MasterProcedureEnv> procExec =
master.getMasterProcedureExecutor();
- // find the first server that match the request and executes the test
- ServerName rsToKill = null;
- for (RegionInfo hri : util.getAdmin().getRegions(tableName)) {
- final ServerName serverName =
AssignmentTestingUtil.getServerHoldingRegion(util, hri);
- if (AssignmentTestingUtil.isServerHoldingMeta(util, serverName) ==
carryingMeta) {
- rsToKill = serverName;
- break;
- }
- }
- // Enable test flags and then queue the crash procedure.
- ProcedureTestingUtility.waitNoProcedureRunning(procExec);
- if (doubleExecution) {
- // For SCP, if you enable this then we will enter an infinite loop, as
we will crash between
- // queue and open for TRSP, and then going back to queue, as we will
use the crash rs as the
- target server since it is recorded in hbase:meta.
- ProcedureTestingUtility.setKillIfHasParent(procExec, false);
- ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec,
true);
- // kill the RS
- AssignmentTestingUtil.killRs(util, rsToKill);
- long procId = getSCPProcId(procExec);
- // Now run through the procedure twice crashing the executor on each
step...
- MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec,
procId);
- } else {
- // kill the RS
- AssignmentTestingUtil.killRs(util, rsToKill);
- long procId = getSCPProcId(procExec);
- ProcedureTestingUtility.waitProcedure(procExec, procId);
- }
- assertReplicaDistributed(t);
- assertEquals(count, util.countRows(t));
- assertEquals(checksum, util.checksumRows(t));
- }
- }
-
- @Test
public void testConcurrentSCPForSameServer() throws Exception {
- final TableName tableName =
- TableName.valueOf("testConcurrentSCPForSameServer");
+ final TableName tableName =
TableName.valueOf("testConcurrentSCPForSameServer");
try (Table t = createTable(tableName)) {
// Load the table with a bit of data so some logs to split and some
edits in each region.
this.util.loadTable(t, HBaseTestingUtility.COLUMNS[0]);
- final int count = util.countRows(t);
+ final int count = HBaseTestingUtility.countRows(t);
assertTrue("expected some rows", count > 0);
// find the first server that match the request and executes the test
ServerName rsToKill = null;
@@ -184,15 +71,15 @@ public class TestSCP {
HMaster master = util.getHBaseCluster().getMaster();
final ProcedureExecutor<MasterProcedureEnv> pExecutor =
master.getMasterProcedureExecutor();
ServerCrashProcedure procB =
- new ServerCrashProcedure(pExecutor.getEnvironment(), rsToKill,
false, false);
+ new ServerCrashProcedure(pExecutor.getEnvironment(), rsToKill, false,
false);
AssignmentTestingUtil.killRs(util, rsToKill);
long procId = getSCPProcId(pExecutor);
- Procedure procA = pExecutor.getProcedure(procId);
+ Procedure<?> procA = pExecutor.getProcedure(procId);
LOG.info("submit SCP procedureA");
util.waitFor(5000, () -> procA.hasLock());
LOG.info("procedureA acquired the lock");
assertEquals(Procedure.LockState.LOCK_EVENT_WAIT,
- procB.acquireLock(pExecutor.getEnvironment()));
+ procB.acquireLock(pExecutor.getEnvironment()));
LOG.info("procedureB should not be able to get the lock");
util.waitFor(60000,
() -> procB.acquireLock(pExecutor.getEnvironment()) ==
Procedure.LockState.LOCK_ACQUIRED);
@@ -200,14 +87,4 @@ public class TestSCP {
assertTrue(procA.isFinished());
}
}
-
- protected void assertReplicaDistributed(final Table t) {
- return;
- }
-
- protected Table createTable(final TableName tableName) throws IOException {
- final Table t = this.util.createTable(tableName,
HBaseTestingUtility.COLUMNS,
- HBaseTestingUtility.KEYS_FOR_HBA_CREATE_TABLE);
- return t;
- }
}
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCP.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPBase.java
similarity index 63%
copy from
hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCP.java
copy to
hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPBase.java
index b3fbefc..b3bfd2f 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCP.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPBase.java
@@ -19,40 +19,34 @@ package org.apache.hadoop.hbase.master.procedure;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import java.io.IOException;
-
+import java.util.ArrayList;
+import java.util.List;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.assignment.AssignmentTestingUtil;
-import org.apache.hadoop.hbase.procedure2.Procedure;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility;
-import org.apache.hadoop.hbase.testclassification.LargeTests;
-import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.apache.hadoop.hbase.regionserver.Region;
+import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.junit.After;
import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({MasterTests.class, LargeTests.class})
-public class TestSCP {
-
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestSCP.class);
+public class TestSCPBase {
- private static final Logger LOG = LoggerFactory.getLogger(TestSCP.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestSCPBase.class);
protected HBaseTestingUtility util;
@@ -82,47 +76,26 @@ public class TestSCP {
@After
public void tearDown() throws Exception {
MiniHBaseCluster cluster = this.util.getHBaseCluster();
- HMaster master = cluster == null? null: cluster.getMaster();
+ HMaster master = cluster == null ? null : cluster.getMaster();
if (master != null && master.getMasterProcedureExecutor() != null) {
- ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(
- master.getMasterProcedureExecutor(), false);
+
ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(master.getMasterProcedureExecutor(),
+ false);
}
this.util.shutdownMiniCluster();
}
-
- @Test
- public void testCrashTargetRs() throws Exception {
- testRecoveryAndDoubleExecution(false, false);
- }
-
- @Test
- public void testRecoveryAndDoubleExecutionOnRsWithMeta() throws Exception {
- testRecoveryAndDoubleExecution(true, true);
- }
-
- @Test
- public void testRecoveryAndDoubleExecutionOnRsWithoutMeta() throws Exception
{
- testRecoveryAndDoubleExecution(false, true);
- }
-
- private long getSCPProcId(ProcedureExecutor<?> procExec) {
- util.waitFor(30000, () -> !procExec.getProcedures().isEmpty());
- return
procExec.getActiveProcIds().stream().mapToLong(Long::longValue).min().getAsLong();
- }
-
/**
* Run server crash procedure steps twice to test idempotency and that we
are persisting all
* needed state.
*/
- private void testRecoveryAndDoubleExecution(boolean carryingMeta, boolean
doubleExecution)
+ protected void testRecoveryAndDoubleExecution(boolean carryingMeta, boolean
doubleExecution)
throws Exception {
- final TableName tableName =
TableName.valueOf("testRecoveryAndDoubleExecution-carryingMeta-"
- + carryingMeta + "-doubleExecution-" + doubleExecution);
+ final TableName tableName =
TableName.valueOf("testRecoveryAndDoubleExecution-carryingMeta-" +
+ carryingMeta + "-doubleExecution-" + doubleExecution);
try (Table t = createTable(tableName)) {
// Load the table with a bit of data so some logs to split and some
edits in each region.
this.util.loadTable(t, HBaseTestingUtility.COLUMNS[0]);
- final int count = util.countRows(t);
+ final int count = HBaseTestingUtility.countRows(t);
assertTrue("expected some rows", count > 0);
final String checksum = util.checksumRows(t);
// Run the procedure executor outside the master so we can mess with it.
Need to disable
@@ -158,56 +131,53 @@ public class TestSCP {
ProcedureTestingUtility.waitProcedure(procExec, procId);
}
assertReplicaDistributed(t);
- assertEquals(count, util.countRows(t));
+ assertEquals(count, HBaseTestingUtility.countRows(t));
assertEquals(checksum, util.checksumRows(t));
}
}
- @Test
- public void testConcurrentSCPForSameServer() throws Exception {
- final TableName tableName =
- TableName.valueOf("testConcurrentSCPForSameServer");
- try (Table t = createTable(tableName)) {
- // Load the table with a bit of data so some logs to split and some
edits in each region.
- this.util.loadTable(t, HBaseTestingUtility.COLUMNS[0]);
- final int count = util.countRows(t);
- assertTrue("expected some rows", count > 0);
- // find the first server that match the request and executes the test
- ServerName rsToKill = null;
- for (RegionInfo hri : util.getAdmin().getRegions(tableName)) {
- final ServerName serverName =
AssignmentTestingUtil.getServerHoldingRegion(util, hri);
- if (AssignmentTestingUtil.isServerHoldingMeta(util, serverName) ==
true) {
- rsToKill = serverName;
- break;
+ protected long getSCPProcId(ProcedureExecutor<?> procExec) {
+ util.waitFor(30000, () -> !procExec.getProcedures().isEmpty());
+ return
procExec.getActiveProcIds().stream().mapToLong(Long::longValue).min().getAsLong();
+ }
+
+ private void assertReplicaDistributed(Table t) throws IOException {
+ if (t.getDescriptor().getRegionReplication() <= 1) {
+ return;
+ }
+ // Assert all data came back.
+ List<RegionInfo> regionInfos = new ArrayList<>();
+ for (RegionServerThread rs :
this.util.getMiniHBaseCluster().getRegionServerThreads()) {
+ regionInfos.clear();
+ for (Region r : rs.getRegionServer().getRegions(t.getName())) {
+ LOG.info("The region is " + r.getRegionInfo() + " the location is " +
+ rs.getRegionServer().getServerName());
+ if (contains(regionInfos, r.getRegionInfo())) {
+ LOG.error("Am exiting");
+ fail("Crashed replica regions should not be assigned to same region
server");
+ } else {
+ regionInfos.add(r.getRegionInfo());
}
}
- HMaster master = util.getHBaseCluster().getMaster();
- final ProcedureExecutor<MasterProcedureEnv> pExecutor =
master.getMasterProcedureExecutor();
- ServerCrashProcedure procB =
- new ServerCrashProcedure(pExecutor.getEnvironment(), rsToKill,
false, false);
- AssignmentTestingUtil.killRs(util, rsToKill);
- long procId = getSCPProcId(pExecutor);
- Procedure procA = pExecutor.getProcedure(procId);
- LOG.info("submit SCP procedureA");
- util.waitFor(5000, () -> procA.hasLock());
- LOG.info("procedureA acquired the lock");
- assertEquals(Procedure.LockState.LOCK_EVENT_WAIT,
- procB.acquireLock(pExecutor.getEnvironment()));
- LOG.info("procedureB should not be able to get the lock");
- util.waitFor(60000,
- () -> procB.acquireLock(pExecutor.getEnvironment()) ==
Procedure.LockState.LOCK_ACQUIRED);
- LOG.info("when procedure B get the lock, procedure A should be
finished");
- assertTrue(procA.isFinished());
}
}
- protected void assertReplicaDistributed(final Table t) {
- return;
+ private boolean contains(List<RegionInfo> regionInfos, RegionInfo
regionInfo) {
+ for (RegionInfo info : regionInfos) {
+ if (RegionReplicaUtil.isReplicasForSameRegion(info, regionInfo)) {
+ return true;
+ }
+ }
+ return false;
}
protected Table createTable(final TableName tableName) throws IOException {
final Table t = this.util.createTable(tableName,
HBaseTestingUtility.COLUMNS,
- HBaseTestingUtility.KEYS_FOR_HBA_CREATE_TABLE);
+ HBaseTestingUtility.KEYS_FOR_HBA_CREATE_TABLE, getRegionReplication());
return t;
}
+
+ protected int getRegionReplication() {
+ return 1;
+ }
}
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithMeta.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithMeta.java
new file mode 100644
index 0000000..09d9d87
--- /dev/null
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithMeta.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master.procedure;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ MasterTests.class, LargeTests.class })
+public class TestSCPWithMeta extends TestSCPBase {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestSCPWithMeta.class);
+
+ @Test
+ public void testRecoveryAndDoubleExecutionOnRsWithMeta() throws Exception {
+ testRecoveryAndDoubleExecution(true, true);
+ }
+}
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithMetaWithReplicas.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithMetaWithReplicas.java
new file mode 100644
index 0000000..fd6471b
--- /dev/null
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithMetaWithReplicas.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master.procedure;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.junit.ClassRule;
+import org.junit.experimental.categories.Category;
+
+@Category({ MasterTests.class, LargeTests.class })
+public class TestSCPWithMetaWithReplicas extends TestSCPWithMeta {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestSCPWithMetaWithReplicas.class);
+
+ @Override
+ protected void startMiniCluster() throws Exception {
+ // Start a cluster with 4 nodes because we have 3 replicas.
+ // So on a crash of a server still we can ensure that the
+ // replicas are distributed.
+ this.util.startMiniCluster(4);
+ }
+
+ @Override
+ protected int getRegionReplication() {
+ return 3;
+ }
+}
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithMetaWithReplicasWithoutZKCoordinated.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithMetaWithReplicasWithoutZKCoordinated.java
new file mode 100644
index 0000000..76b8c8f
--- /dev/null
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithMetaWithReplicasWithoutZKCoordinated.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master.procedure;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.junit.ClassRule;
+import org.junit.experimental.categories.Category;
+
+@Category({ MasterTests.class, LargeTests.class })
+public class TestSCPWithMetaWithReplicasWithoutZKCoordinated extends
TestSCPWithMetaWithReplicas {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+
HBaseClassTestRule.forClass(TestSCPWithMetaWithReplicasWithoutZKCoordinated.class);
+
+ @Override
+ protected void setupConf(Configuration conf) {
+ super.setupConf(conf);
+ conf.setBoolean(HConstants.HBASE_SPLIT_WAL_COORDINATED_BY_ZK, false);
+ }
+}
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithMetaWithoutZKCoordinated.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithMetaWithoutZKCoordinated.java
new file mode 100644
index 0000000..b38eec2
--- /dev/null
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithMetaWithoutZKCoordinated.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master.procedure;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.junit.ClassRule;
+import org.junit.experimental.categories.Category;
+
+@Category({ MasterTests.class, LargeTests.class })
+public class TestSCPWithMetaWithoutZKCoordinated extends TestSCPWithMeta {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestSCPWithMetaWithoutZKCoordinated.class);
+
+ @Override
+ protected void setupConf(Configuration conf) {
+ super.setupConf(conf);
+ conf.setBoolean(HConstants.HBASE_SPLIT_WAL_COORDINATED_BY_ZK, false);
+ }
+}
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithReplicas.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithReplicas.java
index 522e820..2479053 100644
---
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithReplicas.java
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithReplicas.java
@@ -10,34 +10,18 @@
*/
package org.apache.hadoop.hbase.master.procedure;
-import static org.junit.Assert.fail;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.RegionInfo;
-import org.apache.hadoop.hbase.client.RegionReplicaUtil;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MasterTests;
-import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.junit.ClassRule;
import org.junit.experimental.categories.Category;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
@Category({ MasterTests.class, LargeTests.class })
public class TestSCPWithReplicas extends TestSCP {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestSCPWithReplicas.class);
- private static final Logger LOG =
LoggerFactory.getLogger(TestSCPWithReplicas.class);
+ HBaseClassTestRule.forClass(TestSCPWithReplicas.class);
@Override
protected void startMiniCluster() throws Exception {
@@ -48,37 +32,7 @@ public class TestSCPWithReplicas extends TestSCP {
}
@Override
- protected Table createTable(final TableName tableName) throws IOException {
- final Table t = this.util.createTable(tableName,
HBaseTestingUtility.COLUMNS,
- HBaseTestingUtility.KEYS_FOR_HBA_CREATE_TABLE, 3);
- return t;
- }
-
- @Override
- protected void assertReplicaDistributed(final Table t) {
- // Assert all data came back.
- List<RegionInfo> regionInfos = new ArrayList<>();
- for (RegionServerThread rs :
this.util.getMiniHBaseCluster().getRegionServerThreads()) {
- regionInfos.clear();
- for (Region r : rs.getRegionServer().getRegions(t.getName())) {
- LOG.info("The region is " + r.getRegionInfo() + " the location is "
- + rs.getRegionServer().getServerName());
- if (contains(regionInfos, r.getRegionInfo())) {
- LOG.error("Am exiting");
- fail("Crashed replica regions should not be assigned to same region
server");
- } else {
- regionInfos.add(r.getRegionInfo());
- }
- }
- }
- }
-
- private boolean contains(List<RegionInfo> regionInfos, RegionInfo
regionInfo) {
- for (RegionInfo info : regionInfos) {
- if (RegionReplicaUtil.isReplicasForSameRegion(info, regionInfo)) {
- return true;
- }
- }
- return false;
+ protected int getRegionReplication() {
+ return 3;
}
}
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithoutMeta.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithoutMeta.java
new file mode 100644
index 0000000..3347725
--- /dev/null
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithoutMeta.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master.procedure;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({ MasterTests.class, LargeTests.class })
+public class TestSCPWithoutMeta extends TestSCPBase {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestSCPWithoutMeta.class);
+
+ @Test
+ public void testRecoveryAndDoubleExecutionOnRsWithoutMeta() throws Exception
{
+ testRecoveryAndDoubleExecution(false, true);
+ }
+}
diff --git
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithoutMetaWithReplicas.java
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithoutMetaWithReplicas.java
new file mode 100644
index 0000000..44f98f2
--- /dev/null
+++
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithoutMetaWithReplicas.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master.procedure;
+
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.junit.ClassRule;
+import org.junit.experimental.categories.Category;
+
+@Category({ MasterTests.class, LargeTests.class })
+public class TestSCPWithoutMetaWithReplicas extends TestSCPWithoutMeta {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestSCPWithoutMetaWithReplicas.class);
+
+ @Override
+ protected void startMiniCluster() throws Exception {
+ // Start a cluster with 4 nodes because we have 3 replicas.
+ // So on a crash of a server still we can ensure that the
+ // replicas are distributed.
+ this.util.startMiniCluster(4);
+ }
+
+ @Override
+ protected int getRegionReplication() {
+ return 3;
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithoutMetaWithReplicasWithoutZKCoordinated.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithoutMetaWithReplicasWithoutZKCoordinated.java
new file mode 100644
index 0000000..bf093db
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithoutMetaWithReplicasWithoutZKCoordinated.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master.procedure;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.junit.ClassRule;
+import org.junit.experimental.categories.Category;
+
+@Category({ MasterTests.class, LargeTests.class })
+public class TestSCPWithoutMetaWithReplicasWithoutZKCoordinated
+ extends TestSCPWithoutMetaWithReplicas {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestSCPWithoutMetaWithReplicasWithoutZKCoordinated.class);
+
+ @Override
+ protected void setupConf(Configuration conf) {
+ super.setupConf(conf);
+ conf.setBoolean(HConstants.HBASE_SPLIT_WAL_COORDINATED_BY_ZK, false);
+ }
+
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithoutMetaWithoutZKCoordinated.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithoutMetaWithoutZKCoordinated.java
new file mode 100644
index 0000000..54b07dc
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestSCPWithoutMetaWithoutZKCoordinated.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.master.procedure;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.testclassification.LargeTests;
+import org.apache.hadoop.hbase.testclassification.MasterTests;
+import org.junit.ClassRule;
+import org.junit.experimental.categories.Category;
+
+@Category({ MasterTests.class, LargeTests.class })
+public class TestSCPWithoutMetaWithoutZKCoordinated extends TestSCPWithoutMeta {
+
+ @ClassRule
+ public static final HBaseClassTestRule CLASS_RULE =
+ HBaseClassTestRule.forClass(TestSCPWithoutMetaWithoutZKCoordinated.class);
+
+ @Override
+ protected void setupConf(Configuration conf) {
+ super.setupConf(conf);
+ conf.setBoolean(HConstants.HBASE_SPLIT_WAL_COORDINATED_BY_ZK, false);
+ }
+}