HDFS-11515. -du throws ConcurrentModificationException. Contributed by Istvan Fajth, Wei-Chiu Chuang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bc7aff7c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bc7aff7c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bc7aff7c

Branch: refs/heads/HDFS-7240
Commit: bc7aff7cec07bbc3fed63a00c8f1584c34670998
Parents: 845529b
Author: Wei-Chiu Chuang <[email protected]>
Authored: Mon Apr 3 07:32:27 2017 -0700
Committer: Wei-Chiu Chuang <[email protected]>
Committed: Mon Apr 3 07:35:09 2017 -0700

----------------------------------------------------------------------
 .../snapshot/DirectoryWithSnapshotFeature.java  |  5 ++
 .../snapshot/TestRenameWithSnapshots.java       |  6 +-
 .../namenode/snapshot/TestSnapshotDeletion.java | 75 ++++++++++++++++++++
 3 files changed, 84 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bc7aff7c/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java
index 9addbfa..9840679 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java
@@ -633,6 +633,11 @@ public class DirectoryWithSnapshotFeature implements INode.Feature {
     for(DirectoryDiff d : diffs) {
       for(INode deletedNode : d.getChildrenDiff().getList(ListType.DELETED)) {
         context.reportDeletedSnapshottedNode(deletedNode);
+        if (deletedNode.isDirectory()){
+          DirectoryWithSnapshotFeature sf =
+              deletedNode.asDirectory().getDirectoryWithSnapshotFeature();
+          sf.computeContentSummary4Snapshot(context);
+        }
       }
     }
   }
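
The hunk above makes computeContentSummary4Snapshot descend into deleted directories, so that children recorded as deleted inside those directories' own snapshot diffs are reported as well instead of being skipped. A minimal, self-contained sketch of that recursive-descent pattern, using a hypothetical Node type and counter rather than the actual HDFS inode and context classes, could look like this:

    import java.util.ArrayList;
    import java.util.List;

    public class DeletedSubtreeSummary {
      // Hypothetical stand-in for an inode recorded as deleted in a snapshot diff.
      static class Node {
        final boolean isDirectory;
        final List<Node> deletedChildren = new ArrayList<>();
        Node(boolean isDirectory) { this.isDirectory = isDirectory; }
      }

      // Count a directory's deleted children and, when a deleted child is itself
      // a directory, recurse into it so its deleted descendants are counted too,
      // mirroring the added isDirectory() branch in the patch.
      static long reportDeleted(Node dir) {
        long count = 0;
        for (Node deleted : dir.deletedChildren) {
          count++;
          if (deleted.isDirectory) {
            count += reportDeleted(deleted);
          }
        }
        return count;
      }

      public static void main(String[] args) {
        Node snapshotDir = new Node(true);
        Node d2 = new Node(true);
        Node d4 = new Node(true);
        d2.deletedChildren.add(d4);
        snapshotDir.deletedChildren.add(d2);
        System.out.println(reportDeleted(snapshotDir)); // prints 2: d2 and d4
      }
    }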

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bc7aff7c/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java
index d1b3aa6..d06c384 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java
@@ -26,6 +26,7 @@ import static org.mockito.Matchers.anyBoolean;
 import static org.mockito.Matchers.anyObject;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.spy;
+import static org.apache.hadoop.test.GenericTestUtils.getTestDir;
 
 import java.io.File;
 import java.io.IOException;
@@ -2429,7 +2430,7 @@ public class TestRenameWithSnapshots {
    */
   @Test (timeout=300000)
   public void testDu() throws Exception {
-    File tempFile = File.createTempFile("testDu-", ".tmp");
+    File tempFile = File.createTempFile("testDu-", ".tmp", getTestDir());
     tempFile.deleteOnExit();
 
     final FileSystem localfs = FileSystem.getLocal(conf);
@@ -2539,7 +2540,8 @@ public class TestRenameWithSnapshots {
    */
   @Test (timeout=300000)
   public void testDuMultipleDirs() throws Exception {
-    File tempFile = File.createTempFile("testDuMultipleDirs-", "" + ".tmp");
+    File tempFile = File.createTempFile("testDuMultipleDirs-", ".tmp",
+        getTestDir());
     tempFile.deleteOnExit();
 
     final FileSystem localfs = FileSystem.getLocal(conf);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bc7aff7c/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDeletion.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDeletion.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDeletion.java
index ca53788..7926e44 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDeletion.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotDeletion.java
@@ -27,6 +27,7 @@ import java.io.PrintStream;
 import java.security.PrivilegedAction;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
@@ -1232,4 +1233,78 @@ public class TestSnapshotDeletion {
     // make sure bar has been cleaned from inodeMap
     Assert.assertNull(fsdir.getInode(fileId));
   }
+
+  /**
+   * Test for HDFS-11515.
+   * In a scenario where a directory with subdirectories is removed from the
+   * file system after taking a snapshot on one of its ancestors, du command
+   * fails with a ConcurrentModificationException until a new snapshot is taken,
+   * or the old snapshots are removed.
+   * This test is testing this scenario with checks on the space consumed
+   * calculation.
+   *
+   * @throws Exception
+   */
+  @Test(timeout = 180000)
+  public void testDuWithRmdirInSnapshots() throws Exception {
+    final Path parent = new Path("/testDuWithRmdirInSnapshots");
+    final Path snapshotDir = new Path(parent, "snapshotDir");
+    final Path dir1 = new Path(snapshotDir, "d1"); //snapshotDir/d1
+    final Path dir2 = new Path(snapshotDir, "d2"); //snapshotDir/d2
+    final Path dir4 = new Path(dir2, "d4"); //snapshotDir/d2/d4
+    final Path dir3 = new Path(snapshotDir, "d3"); //snapshotDir/d3
+    final Path dir5 = new Path(dir3, "d5"); //snapshotDir/d3/d5
+    final Path aFileOutsideSnapshots = new Path(parent, "aFile");
+    final Path aFileInsideSnapshots = new Path(dir5, "aFile");
+
+    final String snapshotName = "s1";
+    final String snapshotName2 = "s2";
+
+    final long spaceConsumed = BLOCKSIZE * REPLICATION;
+    final long spaceConsumed2 = 2 * spaceConsumed;
+    ContentSummary summary = null;
+
+    DFSTestUtil.createFile(hdfs, aFileOutsideSnapshots,
+        BLOCKSIZE, REPLICATION, 0);
+    summary = hdfs.getContentSummary(parent);
+    assertEquals("Du is wrong even with one file without further ado.",
+        spaceConsumed, summary.getSpaceConsumed());
+
+    hdfs.mkdirs(snapshotDir);
+    hdfs.allowSnapshot(snapshotDir);
+    hdfs.mkdirs(dir1);
+
+    hdfs.createSnapshot(snapshotDir, snapshotName);
+
+    hdfs.mkdirs(dir4);
+    hdfs.mkdirs(dir5);
+    DFSTestUtil.createFile(hdfs, aFileInsideSnapshots,
+        BLOCKSIZE, REPLICATION, 0);
+    summary = hdfs.getContentSummary(parent);
+    assertEquals("Du is wrong with 2 files added to the file system.",
+        spaceConsumed2, summary.getSpaceConsumed());
+
+    hdfs.createSnapshot(snapshotDir, snapshotName2);
+
+    hdfs.delete(dir2, true);
+    hdfs.delete(dir3, true);
+
+    summary = hdfs.getContentSummary(parent);
+    assertEquals("Snapshot file count is not matching expected value.",
+        1, summary.getSnapshotFileCount());
+    assertEquals("Snapshot directory count is not matching expected value.",
+        4, summary.getSnapshotDirectoryCount());
+    assertEquals("Consumed space does not matching expected value.",
+        spaceConsumed, summary.getSnapshotSpaceConsumed());
+    assertEquals("Snapshot length is not matching expected value.",
+        BLOCKSIZE, summary.getSnapshotLength());
+    assertEquals("File count is not matching expected value.",
+        2, summary.getFileCount());
+    assertEquals("Directory count is not matching expected value.",
+        7, summary.getDirectoryCount());
+    assertEquals("Consumed space is not matching expected value.",
+        spaceConsumed2, summary.getSpaceConsumed());
+    assertEquals("Length is not matching expected value.",
+        2 * BLOCKSIZE, summary.getLength());
+  }
 }

