[02/50] [abbrv] hadoop git commit: HDFS-7917. Use file to replace data dirs in test to simulate a disk failure. Contributed by Lei (Eddy) Xu.

2015-03-27 Thread zjshen
HDFS-7917. Use file to replace data dirs in test to simulate a disk failure. 
Contributed by Lei (Eddy) Xu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/62d47c25
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/62d47c25
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/62d47c25

Branch: refs/heads/YARN-2928
Commit: 62d47c251e663b4d97f6164eaa8f3324131034f6
Parents: 2bf393b
Author: cnauroth cnaur...@apache.org
Authored: Mon Mar 23 16:29:51 2015 -0700
Committer: Zhijie Shen zjs...@apache.org
Committed: Thu Mar 26 23:29:43 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +
 .../hdfs/server/datanode/DataNodeTestUtils.java | 61 +++-
 .../datanode/TestDataNodeHotSwapVolumes.java| 29 --
 .../datanode/TestDataNodeVolumeFailure.java | 11 +---
 .../TestDataNodeVolumeFailureReporting.java | 46 ---
 .../TestDataNodeVolumeFailureToleration.java|  8 +--
 6 files changed, 88 insertions(+), 70 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/62d47c25/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 8c99876..b88b7e3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -774,6 +774,9 @@ Release 2.7.0 - UNRELEASED
 
 HDFS-7962. Remove duplicated logs in BlockManager. (yliu)
 
+HDFS-7917. Use file to replace data dirs in test to simulate a disk 
failure.
+(Lei (Eddy) Xu via cnauroth)
+
   OPTIMIZATIONS
 
 HDFS-7454. Reduce memory footprint for AclEntries in NameNode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/62d47c25/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/DataNodeTestUtils.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/DataNodeTestUtils.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/DataNodeTestUtils.java
index fd51e52..f9a2ba1 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/DataNodeTestUtils.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/DataNodeTestUtils.java
@@ -40,7 +40,9 @@ import com.google.common.base.Preconditions;
  * Utility class for accessing package-private DataNode information during 
tests.
  *
  */
-public class DataNodeTestUtils {  
+public class DataNodeTestUtils {
+  private static final String DIR_FAILURE_SUFFIX = ".origin";
+
   public static DatanodeRegistration 
   getDNRegistrationForBP(DataNode dn, String bpid) throws IOException {
 return dn.getDNRegistrationForBP(bpid);
@@ -159,4 +161,61 @@ public class DataNodeTestUtils {
   final String bpid, final long blkId) {
 return FsDatasetTestUtil.fetchReplicaInfo(dn.getFSDataset(), bpid, blkId);
   }
+
+  /**
+   * It injects disk failures to data dirs by replacing these data dirs with
+   * regular files.
+   *
+   * @param dirs data directories.
+   * @throws IOException on I/O error.
+   */
+  public static void injectDataDirFailure(File... dirs) throws IOException {
+for (File dir : dirs) {
+  File renamedTo = new File(dir.getPath() + DIR_FAILURE_SUFFIX);
+  if (renamedTo.exists()) {
+throw new IOException(String.format(
+"Can not inject failure to dir: %s because %s exists.",
+dir, renamedTo));
+  }
+  if (!dir.renameTo(renamedTo)) {
+throw new IOException(String.format("Failed to rename %s to %s.",
+dir, renamedTo));
+  }
+  if (!dir.createNewFile()) {
+throw new IOException(String.format(
+"Failed to create file %s to inject disk failure.", dir));
+  }
+}
+  }
+
+  /**
+   * Restore the injected data dir failures.
+   *
+   * @see {@link #injectDataDirFailures}.
+   * @param dirs data directories.
+   * @throws IOException
+   */
+  public static void restoreDataDirFromFailure(File... dirs)
+  throws IOException {
+for (File dir : dirs) {
+  File renamedDir = new File(dir.getPath() + DIR_FAILURE_SUFFIX);
+  if (renamedDir.exists()) {
+if (dir.exists()) {
+  if (!dir.isFile()) {
+throw new IOException(
+"Injected failure data dir is supposed to be file: " + dir);
+  }
+  if (!dir.delete()) {
+throw new IOException(
+"Failed to delete injected failure data dir: " + dir);
+  }
+}
+if (!renamedDir.renameTo(dir)) {
+  throw new 

[02/50] [abbrv] hadoop git commit: HDFS-7917. Use file to replace data dirs in test to simulate a disk failure. Contributed by Lei (Eddy) Xu.

2015-03-24 Thread zhz
HDFS-7917. Use file to replace data dirs in test to simulate a disk failure. 
Contributed by Lei (Eddy) Xu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2c238ae4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2c238ae4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2c238ae4

Branch: refs/heads/HDFS-7285
Commit: 2c238ae4e00371ef76582b007bb0e20ac8455d9c
Parents: 972f1f1
Author: cnauroth cnaur...@apache.org
Authored: Mon Mar 23 16:29:51 2015 -0700
Committer: cnauroth cnaur...@apache.org
Committed: Mon Mar 23 16:29:51 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +
 .../hdfs/server/datanode/DataNodeTestUtils.java | 61 +++-
 .../datanode/TestDataNodeHotSwapVolumes.java| 29 --
 .../datanode/TestDataNodeVolumeFailure.java | 11 +---
 .../TestDataNodeVolumeFailureReporting.java | 46 ---
 .../TestDataNodeVolumeFailureToleration.java|  8 +--
 6 files changed, 88 insertions(+), 70 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c238ae4/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 8c99876..b88b7e3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -774,6 +774,9 @@ Release 2.7.0 - UNRELEASED
 
 HDFS-7962. Remove duplicated logs in BlockManager. (yliu)
 
+HDFS-7917. Use file to replace data dirs in test to simulate a disk 
failure.
+(Lei (Eddy) Xu via cnauroth)
+
   OPTIMIZATIONS
 
 HDFS-7454. Reduce memory footprint for AclEntries in NameNode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c238ae4/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/DataNodeTestUtils.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/DataNodeTestUtils.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/DataNodeTestUtils.java
index fd51e52..f9a2ba1 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/DataNodeTestUtils.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/DataNodeTestUtils.java
@@ -40,7 +40,9 @@ import com.google.common.base.Preconditions;
  * Utility class for accessing package-private DataNode information during 
tests.
  *
  */
-public class DataNodeTestUtils {  
+public class DataNodeTestUtils {
+  private static final String DIR_FAILURE_SUFFIX = ".origin";
+
   public static DatanodeRegistration 
   getDNRegistrationForBP(DataNode dn, String bpid) throws IOException {
 return dn.getDNRegistrationForBP(bpid);
@@ -159,4 +161,61 @@ public class DataNodeTestUtils {
   final String bpid, final long blkId) {
 return FsDatasetTestUtil.fetchReplicaInfo(dn.getFSDataset(), bpid, blkId);
   }
+
+  /**
+   * It injects disk failures to data dirs by replacing these data dirs with
+   * regular files.
+   *
+   * @param dirs data directories.
+   * @throws IOException on I/O error.
+   */
+  public static void injectDataDirFailure(File... dirs) throws IOException {
+for (File dir : dirs) {
+  File renamedTo = new File(dir.getPath() + DIR_FAILURE_SUFFIX);
+  if (renamedTo.exists()) {
+throw new IOException(String.format(
+"Can not inject failure to dir: %s because %s exists.",
+dir, renamedTo));
+  }
+  if (!dir.renameTo(renamedTo)) {
+throw new IOException(String.format("Failed to rename %s to %s.",
+dir, renamedTo));
+  }
+  if (!dir.createNewFile()) {
+throw new IOException(String.format(
+"Failed to create file %s to inject disk failure.", dir));
+  }
+}
+  }
+
+  /**
+   * Restore the injected data dir failures.
+   *
+   * @see {@link #injectDataDirFailures}.
+   * @param dirs data directories.
+   * @throws IOException
+   */
+  public static void restoreDataDirFromFailure(File... dirs)
+  throws IOException {
+for (File dir : dirs) {
+  File renamedDir = new File(dir.getPath() + DIR_FAILURE_SUFFIX);
+  if (renamedDir.exists()) {
+if (dir.exists()) {
+  if (!dir.isFile()) {
+throw new IOException(
+"Injected failure data dir is supposed to be file: " + dir);
+  }
+  if (!dir.delete()) {
+throw new IOException(
+"Failed to delete injected failure data dir: " + dir);
+  }
+}
+if (!renamedDir.renameTo(dir)) {
+  throw new