HDFS-12677. Extend TestReconstructStripedFile with a random EC policy. 
Contributed by Takanobu Asanuma

(cherry picked from commit 39a5fbae479ecee3a563e2f4eb937471fbf666f8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/75d3699a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/75d3699a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/75d3699a

Branch: refs/heads/branch-3.1
Commit: 75d3699a00731f6096b3524d879d663552cf9de7
Parents: c9364b3
Author: Chris Douglas <cdouglas@apache.org>
Authored: Mon Mar 12 14:29:44 2018 -0700
Committer: Chris Douglas <cdouglas@apache.org>
Committed: Mon Mar 12 14:30:06 2018 -0700

----------------------------------------------------------------------
 .../hadoop/hdfs/TestReconstructStripedFile.java | 48 +++++++++++--------
 ...econstructStripedFileWithRandomECPolicy.java | 49 ++++++++++++++++++++
 2 files changed, 78 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/75d3699a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java
index 7201e11..1e93a2d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFile.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -67,14 +68,13 @@ import org.junit.Test;
 public class TestReconstructStripedFile {
   public static final Log LOG = LogFactory.getLog(TestReconstructStripedFile.class);
 
-  private final ErasureCodingPolicy ecPolicy =
-      StripedFileTestUtil.getDefaultECPolicy();
-  private final int dataBlkNum = ecPolicy.getNumDataUnits();
-  private final int parityBlkNum = ecPolicy.getNumParityUnits();
-  private final int cellSize = ecPolicy.getCellSize();
-  private final int blockSize = cellSize * 3;
-  private final int groupSize = dataBlkNum + parityBlkNum;
-  private final int dnNum = groupSize + parityBlkNum;
+  private ErasureCodingPolicy ecPolicy;
+  private int dataBlkNum;
+  private int parityBlkNum;
+  private int cellSize;
+  private int blockSize;
+  private int groupSize;
+  private int dnNum;
 
   static {
     GenericTestUtils.setLogLevel(DFSClient.LOG, Level.ALL);
@@ -95,8 +95,20 @@ public class TestReconstructStripedFile {
   private Map<DatanodeID, Integer> dnMap = new HashMap<>();
   private final Random random = new Random();
 
+  public ErasureCodingPolicy getEcPolicy() {
+    return StripedFileTestUtil.getDefaultECPolicy();
+  }
+
   @Before
   public void setup() throws IOException {
+    ecPolicy = getEcPolicy();
+    dataBlkNum = ecPolicy.getNumDataUnits();
+    parityBlkNum = ecPolicy.getNumParityUnits();
+    cellSize = ecPolicy.getCellSize();
+    blockSize = cellSize * 3;
+    groupSize = dataBlkNum + parityBlkNum;
+    dnNum = groupSize + parityBlkNum;
+
     conf = new Configuration();
     conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
     conf.setInt(
@@ -114,10 +126,8 @@ public class TestReconstructStripedFile {
     cluster.waitActive();
 
     fs = cluster.getFileSystem();
-    fs.enableErasureCodingPolicy(
-        StripedFileTestUtil.getDefaultECPolicy().getName());
-    fs.getClient().setErasureCodingPolicy("/",
-        StripedFileTestUtil.getDefaultECPolicy().getName());
+    fs.enableErasureCodingPolicy(ecPolicy.getName());
+    fs.getClient().setErasureCodingPolicy("/", ecPolicy.getName());
 
     List<DataNode> datanodes = cluster.getDataNodes();
     for (int i = 0; i < dnNum; i++) {
@@ -432,7 +442,7 @@ public class TestReconstructStripedFile {
 
     BlockECReconstructionInfo invalidECInfo = new BlockECReconstructionInfo(
        new ExtendedBlock("bp-id", 123456), dataDNs, dnStorageInfo, liveIndices,
-        StripedFileTestUtil.getDefaultECPolicy());
+        ecPolicy);
     List<BlockECReconstructionInfo> ecTasks = new ArrayList<>();
     ecTasks.add(invalidECInfo);
     dataNode.getErasureCodingWorker().processErasureCodingTasks(ecTasks);
@@ -461,7 +471,8 @@ public class TestReconstructStripedFile {
         .numDataNodes(numDataNodes).build();
     cluster.waitActive();
     fs = cluster.getFileSystem();
-    ErasureCodingPolicy policy = StripedFileTestUtil.getDefaultECPolicy();
+    ErasureCodingPolicy policy = ecPolicy;
+    fs.enableErasureCodingPolicy(policy.getName());
     fs.getClient().setErasureCodingPolicy("/", policy.getName());
 
     final int fileLen = cellSize * ecPolicy.getNumDataUnits();
@@ -470,7 +481,8 @@ public class TestReconstructStripedFile {
     }
 
     // Inject data-loss by tear down desired number of DataNodes.
-    assertTrue(policy.getNumParityUnits() >= deadDN);
+    assumeTrue("Ignore case where num dead DNs > num parity units",
+        policy.getNumParityUnits() >= deadDN);
     List<DataNode> dataNodes = new ArrayList<>(cluster.getDataNodes());
     Collections.shuffle(dataNodes);
     for (DataNode dn : dataNodes.subList(0, deadDN)) {
@@ -516,10 +528,8 @@ public class TestReconstructStripedFile {
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(dnNum).build();
     cluster.waitActive();
     fs = cluster.getFileSystem();
-    fs.enableErasureCodingPolicy(
-        StripedFileTestUtil.getDefaultECPolicy().getName());
-    fs.getClient().setErasureCodingPolicy("/",
-        StripedFileTestUtil.getDefaultECPolicy().getName());
+    fs.enableErasureCodingPolicy(ecPolicy.getName());
+    fs.getClient().setErasureCodingPolicy("/", ecPolicy.getName());
 
     final int fileLen = cellSize * ecPolicy.getNumDataUnits() * 2;
     writeFile(fs, "/ec-xmits-weight", fileLen);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/75d3699a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFileWithRandomECPolicy.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFileWithRandomECPolicy.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFileWithRandomECPolicy.java
new file mode 100644
index 0000000..45cde4b
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReconstructStripedFileWithRandomECPolicy.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs;
+
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This test extends TestReconstructStripedFile to use a random
+ * (non-default) EC policy.
+ */
+public class TestReconstructStripedFileWithRandomECPolicy extends
+    TestReconstructStripedFile {
+  private static final Logger LOG = LoggerFactory.getLogger(
+      TestReconstructStripedFileWithRandomECPolicy.class);
+
+  private ErasureCodingPolicy ecPolicy;
+
+  public TestReconstructStripedFileWithRandomECPolicy() {
+    // If you want to debug this test with a specific ec policy, please use
+    // SystemErasureCodingPolicies class.
+    // e.g. ecPolicy = SystemErasureCodingPolicies.getByID(RS_3_2_POLICY_ID);
+    ecPolicy = StripedFileTestUtil.getRandomNonDefaultECPolicy();
+    LOG.info("run {} with {}.",
+        TestReconstructStripedFileWithRandomECPolicy.class
+            .getSuperclass().getSimpleName(), ecPolicy.getName());
+  }
+
+  @Override
+  public ErasureCodingPolicy getEcPolicy() {
+    return ecPolicy;
+  }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org

Reply via email to