HDFS-13252. Code refactoring: Remove Diff.ListType.

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ba0da278
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ba0da278
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ba0da278

Branch: refs/heads/HDFS-12996
Commit: ba0da2785d251745969f88a50d33ce61876d91aa
Parents: 4eeff62
Author: Tsz-Wo Nicholas Sze <szets...@hortonworks.com>
Authored: Fri Mar 9 15:25:41 2018 -0800
Committer: Tsz-Wo Nicholas Sze <szets...@hortonworks.com>
Committed: Fri Mar 9 15:50:26 2018 -0800

----------------------------------------------------------------------
 .../hdfs/server/namenode/FSDirRenameOp.java     |   3 +-
 .../hdfs/server/namenode/INodeDirectory.java    |  10 +-
 .../hdfs/server/namenode/INodeReference.java    |   4 -
 .../snapshot/DirectorySnapshottableFeature.java |   5 +-
 .../snapshot/DirectoryWithSnapshotFeature.java  | 131 +++++++------------
 .../snapshot/FSImageFormatPBSnapshot.java       |   6 +-
 .../namenode/snapshot/SnapshotDiffInfo.java     |  11 +-
 .../snapshot/SnapshotDiffListingInfo.java       |  15 +--
 .../snapshot/SnapshotFSImageFormat.java         |   4 +-
 .../java/org/apache/hadoop/hdfs/util/Diff.java  | 131 +++++++++++++------
 .../namenode/snapshot/SnapshotTestHelper.java   |  79 ++++++-----
 .../snapshot/TestRenameWithSnapshots.java       | 129 +++++++-----------
 .../snapshot/TestSetQuotaWithSnapshot.java      |   6 +-
 13 files changed, 260 insertions(+), 274 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ba0da278/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
index efc8da2..6162ceb 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
@@ -588,8 +588,7 @@ class FSDirRenameOp {
     private INode srcChild;
     private INode oldDstChild;
 
-    RenameOperation(FSDirectory fsd, INodesInPath srcIIP, INodesInPath dstIIP)
-        throws QuotaExceededException {
+    RenameOperation(FSDirectory fsd, INodesInPath srcIIP, INodesInPath dstIIP) {
       this.fsd = fsd;
       this.srcIIP = srcIIP;
       this.dstIIP = dstIIP;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ba0da278/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
index 6594a56..72ad9e9 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
@@ -39,7 +39,6 @@ import 
org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFea
 import 
org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature;
 import 
org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiffList;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
-import org.apache.hadoop.hdfs.util.Diff.ListType;
 import org.apache.hadoop.hdfs.util.ReadOnlyList;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -353,7 +352,7 @@ public class INodeDirectory extends 
INodeWithAdditionalFields
     // replace the instance in the created list of the diff list
     DirectoryWithSnapshotFeature sf = this.getDirectoryWithSnapshotFeature();
     if (sf != null) {
-      sf.getDiffs().replaceChild(ListType.CREATED, oldChild, newChild);
+      sf.getDiffs().replaceCreatedChild(oldChild, newChild);
     }
     
     // update the inodeMap
@@ -746,8 +745,8 @@ public class INodeDirectory extends 
INodeWithAdditionalFields
       final INode newChild) throws QuotaExceededException {
     DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature();
     assert sf != null : "Directory does not have snapshot feature";
-    sf.getDiffs().removeChild(ListType.DELETED, oldChild);
-    sf.getDiffs().replaceChild(ListType.CREATED, oldChild, newChild);
+    sf.getDiffs().removeDeletedChild(oldChild);
+    sf.getDiffs().replaceCreatedChild(oldChild, newChild);
     addChild(newChild, true, Snapshot.CURRENT_STATE_ID);
   }
   
@@ -761,8 +760,7 @@ public class INodeDirectory extends 
INodeWithAdditionalFields
       int latestSnapshotId) throws QuotaExceededException {
     DirectoryWithSnapshotFeature sf = getDirectoryWithSnapshotFeature();
     assert sf != null : "Directory does not have snapshot feature";
-    boolean removeDeletedChild = sf.getDiffs().removeChild(ListType.DELETED,
-        deletedChild);
+    boolean removeDeletedChild = sf.getDiffs().removeDeletedChild(deletedChild);
     int sid = removeDeletedChild ? Snapshot.CURRENT_STATE_ID : latestSnapshotId;
     final boolean added = addChild(deletedChild, true, sid);
     // update quota usage if adding is successfully and the old child has not

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ba0da278/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java
index db2026d..e4e14f7 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java
@@ -126,10 +126,6 @@ public abstract class INodeReference extends INode {
     return referred;
   }
 
-  public final void setReferredINode(INode referred) {
-    this.referred = referred;
-  }
-  
   @Override
   public final boolean isReference() {
     return true;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ba0da278/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectorySnapshottableFeature.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectorySnapshottableFeature.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectorySnapshottableFeature.java
index df2b073..a5bd019 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectorySnapshottableFeature.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectorySnapshottableFeature.java
@@ -44,7 +44,6 @@ import 
org.apache.hadoop.hdfs.server.namenode.INodeReference.WithCount;
 import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithName;
 import org.apache.hadoop.hdfs.server.namenode.INodesInPath;
 import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
-import org.apache.hadoop.hdfs.util.Diff.ListType;
 import org.apache.hadoop.hdfs.util.ReadOnlyList;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.util.Time;
@@ -396,7 +395,7 @@ public class DirectorySnapshottableFeature extends 
DirectoryWithSnapshotFeature
           .getId());
       for (INode child : children) {
         final byte[] name = child.getLocalNameBytes();
-        boolean toProcess = diff.searchIndex(ListType.DELETED, name) < 0;
+        boolean toProcess = !diff.containsDeleted(name);
         if (!toProcess && child instanceof INodeReference.WithName) {
           byte[][] renameTargetPath = findRenameTargetPath(
               snapshotDir, (WithName) child,
@@ -476,7 +475,7 @@ public class DirectorySnapshottableFeature extends 
DirectoryWithSnapshotFeature
         }
         iterate = true;
         level = level + 1;
-        boolean toProcess = diff.searchIndex(ListType.DELETED, name) < 0;
+        boolean toProcess = !diff.containsDeleted(name);
         if (!toProcess && child instanceof INodeReference.WithName) {
           byte[][] renameTargetPath = findRenameTargetPath(snapshotDir,
               (WithName) child, Snapshot.getSnapshotId(later));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ba0da278/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java
index 19be8f8..b757232 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/DirectoryWithSnapshotFeature.java
@@ -17,38 +17,22 @@
  */
 package org.apache.hadoop.hdfs.server.namenode.snapshot;
 
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.ArrayDeque;
-import java.util.Deque;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
+import com.google.common.base.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;
-import org.apache.hadoop.hdfs.server.namenode.AclStorage;
-import org.apache.hadoop.hdfs.server.namenode.ContentCounts;
-import org.apache.hadoop.hdfs.server.namenode.ContentSummaryComputationContext;
-import org.apache.hadoop.hdfs.server.namenode.FSImageSerialization;
-import org.apache.hadoop.hdfs.server.namenode.INode;
-import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
-import org.apache.hadoop.hdfs.server.namenode.INodeDirectoryAttributes;
-import org.apache.hadoop.hdfs.server.namenode.INodeFile;
-import org.apache.hadoop.hdfs.server.namenode.INodeReference;
-import org.apache.hadoop.hdfs.server.namenode.QuotaCounts;
+import org.apache.hadoop.hdfs.server.namenode.*;
 import 
org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotFSImageFormat.ReferenceMap;
 import org.apache.hadoop.hdfs.util.Diff;
 import org.apache.hadoop.hdfs.util.Diff.Container;
-import org.apache.hadoop.hdfs.util.Diff.ListType;
 import org.apache.hadoop.hdfs.util.Diff.UndoInfo;
 import org.apache.hadoop.hdfs.util.ReadOnlyList;
-
-import com.google.common.base.Preconditions;
 import org.apache.hadoop.security.AccessControlException;
 
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.*;
+
 import static 
org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot.NO_SNAPSHOT_ID;
 
 /**
@@ -70,56 +54,43 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
     }
 
     /**
-     * Replace the given child from the created/deleted list.
+     * Replace the given child from the created list.
      * @return true if the child is replaced; false if the child is not found.
      */
-    private boolean replace(final ListType type,
-        final INode oldChild, final INode newChild) {
-      final List<INode> list = getList(type);
+    private boolean replaceCreated(final INode oldChild, final INode newChild) {
+      final List<INode> list = getCreatedUnmodifiable();
       final int i = search(list, oldChild.getLocalNameBytes());
       if (i < 0 || list.get(i).getId() != oldChild.getId()) {
         return false;
       }
 
-      final INode removed = list.set(i, newChild);
+      final INode removed = setCreated(i, newChild);
       Preconditions.checkState(removed == oldChild);
       return true;
     }
 
-    private boolean removeChild(ListType type, final INode child) {
-      final List<INode> list = getList(type);
-      final int i = searchIndex(type, child.getLocalNameBytes());
-      if (i >= 0 && list.get(i) == child) {
-        list.remove(i);
-        return true;
-      }
-      return false;
-    }
-
     /** clear the created list */
     private void destroyCreatedList(INode.ReclaimContext reclaimContext,
         final INodeDirectory currentINode) {
-      final List<INode> createdList = getList(ListType.CREATED);
-      for (INode c : createdList) {
+      for (INode c : getCreatedUnmodifiable()) {
         c.destroyAndCollectBlocks(reclaimContext);
         // c should be contained in the children list, remove it
         currentINode.removeChild(c);
       }
-      createdList.clear();
+      clearCreated();
     }
 
     /** clear the deleted list */
     private void destroyDeletedList(INode.ReclaimContext reclaimContext) {
-      final List<INode> deletedList = getList(ListType.DELETED);
-      for (INode d : deletedList) {
+      for (INode d : getDeletedUnmodifiable()) {
         d.destroyAndCollectBlocks(reclaimContext);
       }
-      deletedList.clear();
+      clearDeleted();
     }
 
     /** Serialize {@link #created} */
     private void writeCreated(DataOutput out) throws IOException {
-      final List<INode> created = getList(ListType.CREATED);
+      final List<INode> created = getCreatedUnmodifiable();
       out.writeInt(created.size());
       for (INode node : created) {
         // For INode in created list, we only need to record its local name
@@ -132,7 +103,7 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
     /** Serialize {@link #deleted} */
     private void writeDeleted(DataOutput out,
         ReferenceMap referenceMap) throws IOException {
-      final List<INode> deleted = getList(ListType.DELETED);
+      final List<INode> deleted = getDeletedUnmodifiable();
       out.writeInt(deleted.size());
       for (INode node : deleted) {
         FSImageSerialization.saveINode2Image(node, out, true, referenceMap);
@@ -148,7 +119,7 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
 
     /** Get the list of INodeDirectory contained in the deleted list */
     private void getDirsInDeleted(List<INodeDirectory> dirList) {
-      for (INode node : getList(ListType.DELETED)) {
+      for (INode node : getDeletedUnmodifiable()) {
         if (node.isDirectory()) {
           dirList.add(node.asDirectory());
         }
@@ -347,24 +318,24 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
     }
 
     /** Replace the given child in the created/deleted list, if there is any. 
*/
-    public boolean replaceChild(final ListType type, final INode oldChild,
+    public boolean replaceCreatedChild(final INode oldChild,
         final INode newChild) {
       final DiffList<DirectoryDiff> diffList = asList();
       for(int i = diffList.size() - 1; i >= 0; i--) {
         final ChildrenDiff diff = diffList.get(i).diff;
-        if (diff.replace(type, oldChild, newChild)) {
+        if (diff.replaceCreated(oldChild, newChild)) {
           return true;
         }
       }
       return false;
     }
 
-    /** Remove the given child in the created/deleted list, if there is any. */
-    public boolean removeChild(final ListType type, final INode child) {
+    /** Remove the given child from the deleted list, if there is any. */
+    public boolean removeDeletedChild(final INode child) {
       final DiffList<DirectoryDiff> diffList = asList();
       for(int i = diffList.size() - 1; i >= 0; i--) {
         final ChildrenDiff diff = diffList.get(i).diff;
-        if (diff.removeChild(type, child)) {
+        if (diff.removeDeleted(child)) {
           return true;
         }
       }
@@ -380,11 +351,9 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
     public int findSnapshotDeleted(final INode child) {
       final DiffList<DirectoryDiff> diffList = asList();
       for(int i = diffList.size() - 1; i >= 0; i--) {
-        final ChildrenDiff diff = diffList.get(i).diff;
-        final int d = diff.searchIndex(ListType.DELETED,
-            child.getLocalNameBytes());
-        if (d >= 0 && diff.getList(ListType.DELETED).get(d) == child) {
-          return diffList.get(i).getSnapshotId();
+        final DirectoryDiff diff = diffList.get(i);
+        if (diff.getChildrenDiff().containsDeleted(child)) {
+          return diff.getSnapshotId();
         }
       }
       return NO_SNAPSHOT_ID;
@@ -442,7 +411,7 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
         DirectoryDiffList diffList = sf.getDiffs();
         DirectoryDiff priorDiff = diffList.getDiffById(prior);
         if (priorDiff != null && priorDiff.getSnapshotId() == prior) {
-          List<INode> dList = priorDiff.diff.getList(ListType.DELETED);
+          List<INode> dList = priorDiff.diff.getDeletedUnmodifiable();
           excludedNodes = cloneDiffList(dList);
         }
         
@@ -512,8 +481,8 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
         }
 
         for (INode child : dir.getChildrenList(prior)) {
-          if (priorChildrenDiff != null && priorChildrenDiff.search(
-              ListType.DELETED, child.getLocalNameBytes()) != null) {
+          if (priorChildrenDiff != null && priorChildrenDiff.getDeleted(
+              child.getLocalNameBytes()) != null) {
             continue;
           }
           queue.addLast(child);
@@ -558,12 +527,14 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
       boolean setModTime, int latestSnapshotId) throws QuotaExceededException {
     ChildrenDiff diff = diffs.checkAndAddLatestSnapshotDiff(latestSnapshotId,
         parent).diff;
-    int undoInfo = diff.create(inode);
-
-    final boolean added = parent.addChild(inode, setModTime,
-        Snapshot.CURRENT_STATE_ID);
-    if (!added) {
-      diff.undoCreate(inode, undoInfo);
+    final int undoInfo = diff.create(inode);
+    boolean added = false;
+    try {
+      added = parent.addChild(inode, setModTime, Snapshot.CURRENT_STATE_ID);
+    } finally {
+      if (!added) {
+        diff.undoCreate(inode, undoInfo);
+      }
     }
     return added;
   }
@@ -587,12 +558,14 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
     // any snapshot.
     ChildrenDiff diff = diffs.checkAndAddLatestSnapshotDiff(latestSnapshotId,
         parent).diff;
-    UndoInfo<INode> undoInfo = diff.delete(child);
-
-    final boolean removed = parent.removeChild(child);
-    if (!removed && undoInfo != null) {
-      // remove failed, undo
-      diff.undoDelete(child, undoInfo);
+    final UndoInfo<INode> undoInfo = diff.delete(child);
+    boolean removed = false;
+    try {
+      removed = parent.removeChild(child);
+    } finally {
+      if (!removed) {
+        diff.undoDelete(child, undoInfo);
+      }
     }
     return removed;
   }
@@ -648,7 +621,7 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
       BlockStoragePolicySuite bsps, byte storagePolicyId) {
     final QuotaCounts counts = new QuotaCounts.Builder().build();
     for(DirectoryDiff d : diffs) {
-      for(INode deleted : d.getChildrenDiff().getList(ListType.DELETED)) {
+      for(INode deleted : d.getChildrenDiff().getDeletedUnmodifiable()) {
         final byte childPolicyId = deleted.getStoragePolicyIDForQuota(
             storagePolicyId);
         counts.add(deleted.computeQuotaUsage(bsps, childPolicyId, false,
@@ -664,7 +637,7 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
     ContentSummaryComputationContext summary = 
         new ContentSummaryComputationContext(bsps);
     for(DirectoryDiff d : diffs) {
-      for(INode deleted : d.getChildrenDiff().getList(ListType.DELETED)) {
+      for(INode deleted : d.getChildrenDiff().getDeletedUnmodifiable()) {
         deleted.computeContentSummary(Snapshot.CURRENT_STATE_ID, summary);
       }
     }
@@ -747,10 +720,8 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
       if (prior != NO_SNAPSHOT_ID) {
         DirectoryDiff priorDiff = this.getDiffs().getDiffById(prior);
         if (priorDiff != null && priorDiff.getSnapshotId() == prior) {
-          List<INode> cList = priorDiff.diff.getList(ListType.CREATED);
-          List<INode> dList = priorDiff.diff.getList(ListType.DELETED);
-          priorCreated = cloneDiffList(cList);
-          priorDeleted = cloneDiffList(dList);
+          priorCreated = cloneDiffList(priorDiff.diff.getCreatedUnmodifiable());
+          priorDeleted = cloneDiffList(priorDiff.diff.getDeletedUnmodifiable());
         }
       }
 
@@ -770,8 +741,7 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
           // cleanSubtreeRecursively call.
           if (priorCreated != null) {
             // we only check the node originally in prior's created list
-            for (INode cNode : priorDiff.getChildrenDiff().getList(
-                ListType.CREATED)) {
+            for (INode cNode : priorDiff.diff.getCreatedUnmodifiable()) {
               if (priorCreated.containsKey(cNode)) {
                 cNode.cleanSubtree(reclaimContext, snapshot, NO_SNAPSHOT_ID);
               }
@@ -786,8 +756,7 @@ public class DirectoryWithSnapshotFeature implements 
INode.Feature {
           // For files moved from posterior's deleted list, we also need to
           // delete its snapshot copy associated with the posterior snapshot.
           
-          for (INode dNode : priorDiff.getChildrenDiff().getList(
-              ListType.DELETED)) {
+          for (INode dNode : priorDiff.diff.getDeletedUnmodifiable()) {
             if (priorDeleted == null || !priorDeleted.containsKey(dNode)) {
               cleanDeletedINode(reclaimContext, dNode, snapshot, prior);
             }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ba0da278/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java
index 4b619a4..11c154e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/FSImageFormatPBSnapshot.java
@@ -78,7 +78,6 @@ import 
org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeat
 import 
org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiffList;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot.Root;
 import org.apache.hadoop.hdfs.server.namenode.XAttrFeature;
-import org.apache.hadoop.hdfs.util.Diff.ListType;
 import org.apache.hadoop.hdfs.util.EnumCounters;
 
 import com.google.common.base.Preconditions;
@@ -581,10 +580,9 @@ public class FSImageFormatPBSnapshot {
                     buildINodeDirectory(copy, parent.getSaverContext()));
           }
           // process created list and deleted list
-          List<INode> created = diff.getChildrenDiff()
-              .getList(ListType.CREATED);
+          List<INode> created = diff.getChildrenDiff().getCreatedUnmodifiable();
           db.setCreatedListSize(created.size());
-          List<INode> deleted = diff.getChildrenDiff().getList(ListType.DELETED);
+          List<INode> deleted = diff.getChildrenDiff().getDeletedUnmodifiable();
           for (INode d : deleted) {
             if (d.isReference()) {
               refList.add(d.asReference());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ba0da278/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotDiffInfo.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotDiffInfo.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotDiffInfo.java
index e3592cc..4c74afd 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotDiffInfo.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotDiffInfo.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
 import org.apache.hadoop.hdfs.server.namenode.INodeFile;
 import org.apache.hadoop.hdfs.server.namenode.INodeReference;
 import 
org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.ChildrenDiff;
-import org.apache.hadoop.hdfs.util.Diff.ListType;
 
 import com.google.common.base.Preconditions;
 import com.google.common.primitives.SignedBytes;
@@ -141,7 +140,7 @@ class SnapshotDiffInfo {
     dirDiffMap.put(dir, diff);
     diffMap.put(dir, relativePath);
     // detect rename
-    for (INode created : diff.getList(ListType.CREATED)) {
+    for (INode created : diff.getCreatedUnmodifiable()) {
       if (created.isReference()) {
         RenameEntry entry = getEntry(created.getId());
         if (entry.getTargetPath() == null) {
@@ -149,7 +148,7 @@ class SnapshotDiffInfo {
         }
       }
     }
-    for (INode deleted : diff.getList(ListType.DELETED)) {
+    for (INode deleted : diff.getDeletedUnmodifiable()) {
       if (deleted instanceof INodeReference.WithName) {
         RenameEntry entry = getEntry(deleted.getId());
         entry.setSource(deleted, relativePath);
@@ -221,11 +220,9 @@ class SnapshotDiffInfo {
   private List<DiffReportEntry> generateReport(ChildrenDiff dirDiff,
       byte[][] parentPath, boolean fromEarlier, Map<Long, RenameEntry> 
renameMap) {
     List<DiffReportEntry> list = new ChunkedArrayList<>();
-    List<INode> created = dirDiff.getList(ListType.CREATED);
-    List<INode> deleted = dirDiff.getList(ListType.DELETED);
     byte[][] fullPath = new byte[parentPath.length + 1][];
     System.arraycopy(parentPath, 0, fullPath, 0, parentPath.length);
-    for (INode cnode : created) {
+    for (INode cnode : dirDiff.getCreatedUnmodifiable()) {
       RenameEntry entry = renameMap.get(cnode.getId());
       if (entry == null || !entry.isRename()) {
         fullPath[fullPath.length - 1] = cnode.getLocalNameBytes();
@@ -233,7 +230,7 @@ class SnapshotDiffInfo {
             : DiffType.DELETE, fullPath));
       }
     }
-    for (INode dnode : deleted) {
+    for (INode dnode : dirDiff.getDeletedUnmodifiable()) {
       RenameEntry entry = renameMap.get(dnode.getId());
       if (entry != null && entry.isRename()) {
         list.add(new DiffReportEntry(DiffType.RENAME,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ba0da278/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotDiffListingInfo.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotDiffListingInfo.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotDiffListingInfo.java
index 738aa23..a796070 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotDiffListingInfo.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotDiffListingInfo.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
 import org.apache.hadoop.hdfs.server.namenode.INodeFile;
 import org.apache.hadoop.hdfs.server.namenode.INodeReference;
 import 
org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.ChildrenDiff;
-import org.apache.hadoop.hdfs.util.Diff.ListType;
 
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.util.ChunkedArrayList;
@@ -96,10 +95,10 @@ class SnapshotDiffListingInfo {
       }
     }
 
-    if (lastIndex == -1 || lastIndex < diff.getList(ListType.CREATED).size()) {
+    final List<INode> clist =  diff.getCreatedUnmodifiable();
+    if (lastIndex == -1 || lastIndex < clist.size()) {
       ListIterator<INode> iterator = lastIndex != -1 ?
-          diff.getList(ListType.CREATED).listIterator(lastIndex)
-          : diff.getList(ListType.CREATED).listIterator();
+          clist.listIterator(lastIndex): clist.listIterator();
       while (iterator.hasNext()) {
         if (getTotalEntries() < maxEntries) {
           INode created = iterator.next();
@@ -115,11 +114,11 @@ class SnapshotDiffListingInfo {
       setLastIndex(-1);
     }
 
-    if (lastIndex == -1 || lastIndex >= diff.getList(ListType.CREATED).size()) {
-      int size = diff.getList(ListType.DELETED).size();
+    if (lastIndex == -1 || lastIndex >= clist.size()) {
+      final List<INode> dlist =  diff.getDeletedUnmodifiable();
+      int size = dlist.size();
       ListIterator<INode> iterator = lastIndex != -1 ?
-          diff.getList(ListType.DELETED).listIterator(lastIndex - size)
-          : diff.getList(ListType.DELETED).listIterator();
+          dlist.listIterator(lastIndex - size): dlist.listIterator();
       while (iterator.hasNext()) {
         if (getTotalEntries() < maxEntries) {
           final INode d = iterator.next();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ba0da278/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotFSImageFormat.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotFSImageFormat.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotFSImageFormat.java
index d1ae293..d60a038 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotFSImageFormat.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotFSImageFormat.java
@@ -38,7 +38,6 @@ import org.apache.hadoop.hdfs.server.namenode.INodeReference;
 import 
org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff;
 import 
org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiffList;
 import org.apache.hadoop.hdfs.tools.snapshot.SnapshotDiff;
-import org.apache.hadoop.hdfs.util.Diff.ListType;
 import org.apache.hadoop.hdfs.util.ReadOnlyList;
 
 import com.google.common.base.Preconditions;
@@ -145,8 +144,7 @@ public class SnapshotFSImageFormat {
     // the INode in the created list should be a reference to another INode
     // in posterior SnapshotDiffs or one of the current children
     for (DirectoryDiff postDiff : parent.getDiffs()) {
-      final INode d = postDiff.getChildrenDiff().search(ListType.DELETED,
-          createdNodeName);
+      final INode d = postDiff.getChildrenDiff().getDeleted(createdNodeName);
       if (d != null) {
         return d;
       } // else go to the next SnapshotDiff

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ba0da278/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/Diff.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/Diff.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/Diff.java
index 26d1fb5..1f87a7a 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/Diff.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/Diff.java
@@ -73,10 +73,6 @@ import com.google.common.base.Preconditions;
  * @param <E> The element type, which must implement {@link Element} interface.
  */
 public class Diff<K, E extends Diff.Element<K>> {
-  public enum ListType {
-    CREATED, DELETED
-  }
-
   /** An interface for the elements in a {@link Diff}. */
   public static interface Element<K> extends Comparable<K> {
     /** @return the key of this object. */
@@ -156,26 +152,73 @@ public class Diff<K, E extends Diff.Element<K>> {
     this.deleted = deleted;
   }
 
-  /** @return the created list, which is never null. */
-  public List<E> getList(final ListType type) {
-    final List<E> list = type == ListType.CREATED? created: deleted;
-    return list == null? Collections.<E>emptyList(): list;
+  public List<E> getCreatedUnmodifiable() {
+    return created != null? Collections.unmodifiableList(created)
+        : Collections.emptyList();
+  }
+
+  public E setCreated(int index, E element) {
+    final E old = created.set(index, element);
+    if (old.compareTo(element.getKey()) != 0) {
+      throw new AssertionError("Element mismatched: element=" + element
+          + " but old=" + old);
+    }
+    return old;
+  }
+
+  public void clearCreated() {
+    if (created != null) {
+      created.clear();
+    }
+  }
+
+  public List<E> getDeletedUnmodifiable() {
+    return deleted != null? Collections.unmodifiableList(deleted)
+        : Collections.emptyList();
+  }
+
+  public boolean containsDeleted(final K key) {
+    if (deleted != null) {
+      return search(deleted, key) >= 0;
+    }
+    return false;
   }
 
-  public int searchIndex(final ListType type, final K name) {
-    return search(getList(type), name);
+  public boolean containsDeleted(final E element) {
+    return getDeleted(element.getKey()) == element;
   }
 
   /**
    * @return null if the element is not found;
-   *         otherwise, return the element in the created/deleted list.
+   *         otherwise, return the element in the deleted list.
    */
-  public E search(final ListType type, final K name) {
-    final List<E> list = getList(type); 
-    final int c = search(list, name);
-    return c < 0 ? null : list.get(c);
+  public E getDeleted(final K key) {
+    if (deleted != null) {
+      final int c = search(deleted, key);
+      if (c >= 0) {
+        return deleted.get(c);
+      }
+    }
+    return null;
   }
-  
+
+  public boolean removeDeleted(final E element) {
+    if (deleted != null) {
+      final int i = search(deleted, element.getKey());
+      if (i >= 0 && deleted.get(i) == element) {
+        deleted.remove(i);
+        return true;
+      }
+    }
+    return false;
+  }
+
+  public void clearDeleted() {
+    if (deleted != null) {
+      deleted.clear();
+    }
+  }
+
   /** @return true if no changes contained in the diff */
   public boolean isEmpty() {
     return (created == null || created.isEmpty())
@@ -183,34 +226,44 @@ public class Diff<K, E extends Diff.Element<K>> {
   }
   
   /**
-   * Insert the given element to the created/deleted list.
+   * Add the given element to the created list,
+   * provided the element does not exist, i.e. i < 0.
+   *
    * @param i the insertion point defined
    *          in {@link Collections#binarySearch(List, Object)}
+   * @throws AssertionError if i >= 0.
    */
-  private void insert(final ListType type, final E element, final int i) {
-    List<E> list = type == ListType.CREATED? created: deleted; 
+  private void addCreated(final E element, final int i) {
     if (i >= 0) {
       throw new AssertionError("Element already exists: element=" + element
-          + ", " + type + "=" + list);
+          + ", created=" + created);
     }
-    if (list == null) {
-      list = new ArrayList<E>(DEFAULT_ARRAY_INITIAL_CAPACITY);
-      if (type == ListType.CREATED) {
-        created = list;
-      } else if (type == ListType.DELETED){
-        deleted = list;
-      }
+    if (created == null) {
+      created = new ArrayList<>(DEFAULT_ARRAY_INITIAL_CAPACITY);
+    }
+    created.add(-i - 1, element);
+  }
+
+  /** Similar to {@link #addCreated(Element, int)} but for the deleted list. */
+  private void addDeleted(final E element, final int i) {
+    if (i >= 0) {
+      throw new AssertionError("Element already exists: element=" + element
+          + ", deleted=" + deleted);
     }
-    list.add(-i - 1, element);
+    if (deleted == null) {
+      deleted = new ArrayList<>(DEFAULT_ARRAY_INITIAL_CAPACITY);
+    }
+    deleted.add(-i - 1, element);
   }
 
+
   /**
    * Create an element in current state.
    * @return the c-list insertion point for undo.
    */
   public int create(final E element) {
     final int c = search(created, element.getKey());
-    insert(ListType.CREATED, element, c);
+    addCreated(element, c);
     return c;
   }
 
@@ -236,7 +289,7 @@ public class Diff<K, E extends Diff.Element<K>> {
     } else {
       // not in c-list, it must be in previous
       d = search(deleted, element.getKey());
-      insert(ListType.DELETED, element, d);
+      addDeleted(element, d);
     }
     return new UndoInfo<E>(c, previous, d);
   }
@@ -277,8 +330,8 @@ public class Diff<K, E extends Diff.Element<K>> {
       d = search(deleted, oldElement.getKey());
       if (d < 0) {
         // Case 2.3: neither in c-list nor d-list
-        insert(ListType.CREATED, newElement, c);
-        insert(ListType.DELETED, oldElement, d);
+        addCreated(newElement, c);
+        addDeleted(oldElement, d);
       }
     }
     return new UndoInfo<E>(c, previous, d);
@@ -348,7 +401,7 @@ public class Diff<K, E extends Diff.Element<K>> {
    */
   public List<E> apply2Previous(final List<E> previous) {
     return apply2Previous(previous,
-        getList(ListType.CREATED), getList(ListType.DELETED));
+        getCreatedUnmodifiable(), getDeletedUnmodifiable());
   }
 
   private static <K, E extends Diff.Element<K>> List<E> apply2Previous(
@@ -408,7 +461,7 @@ public class Diff<K, E extends Diff.Element<K>> {
    */
   public List<E> apply2Current(final List<E> current) {
     return apply2Previous(current,
-        getList(ListType.DELETED), getList(ListType.CREATED));
+        getDeletedUnmodifiable(), getCreatedUnmodifiable());
   }
   
   /**
@@ -443,8 +496,10 @@ public class Diff<K, E extends Diff.Element<K>> {
    */
   public void combinePosterior(final Diff<K, E> posterior,
       final Processor<E> deletedProcesser) {
-    final Iterator<E> createdIterator = 
posterior.getList(ListType.CREATED).iterator();
-    final Iterator<E> deletedIterator = 
posterior.getList(ListType.DELETED).iterator();
+    final Iterator<E> createdIterator
+        = posterior.getCreatedUnmodifiable().iterator();
+    final Iterator<E> deletedIterator
+        = posterior.getDeletedUnmodifiable().iterator();
 
     E c = createdIterator.hasNext()? createdIterator.next(): null;
     E d = deletedIterator.hasNext()? deletedIterator.next(): null;
@@ -479,7 +534,7 @@ public class Diff<K, E extends Diff.Element<K>> {
   @Override
   public String toString() {
     return getClass().getSimpleName()
-        +  "{created=" + getList(ListType.CREATED)
-        + ", deleted=" + getList(ListType.DELETED) + "}";
+        +  "{created=" + getCreatedUnmodifiable()
+        + ", deleted=" + getDeletedUnmodifiable() + "}";
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ba0da278/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java
index 87fb54e..28cb757 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java
@@ -17,23 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.namenode.snapshot;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -46,47 +29,71 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
 import 
org.apache.hadoop.hdfs.server.blockmanagement.BlockUnderConstructionFeature;
-import org.apache.hadoop.hdfs.server.datanode.BlockPoolSliceStorage;
-import org.apache.hadoop.hdfs.server.datanode.BlockScanner;
-import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.apache.hadoop.hdfs.server.datanode.DirectoryScanner;
-import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
-import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
-import org.apache.hadoop.hdfs.server.namenode.INode;
-import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
-import org.apache.hadoop.hdfs.server.namenode.INodeFile;
-import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
-import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
+import org.apache.hadoop.hdfs.server.datanode.*;
+import org.apache.hadoop.hdfs.server.datanode.checker.DatasetVolumeChecker;
+import org.apache.hadoop.hdfs.server.datanode.checker.ThrottledAsyncChecker;
+import org.apache.hadoop.hdfs.server.namenode.*;
+import org.apache.hadoop.hdfs.server.namenode.top.metrics.TopMetrics;
+import org.apache.hadoop.http.HttpRequestLog;
 import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.ipc.ProtobufRpcEngine.Server;
 import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.hadoop.util.GSet;
 import org.junit.Assert;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.*;
+import java.util.*;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 /**
  * Helper for writing snapshot related tests
  */
 public class SnapshotTestHelper {
-  public static final Log LOG = LogFactory.getLog(SnapshotTestHelper.class);
+  static final Logger LOG = LoggerFactory.getLogger(SnapshotTestHelper.class);
 
   /** Disable the logs that are not very useful for snapshot related tests. */
   public static void disableLogs() {
     final String[] lognames = {
+        "org.apache.hadoop.hdfs.server.common.Util",
+        
"org.apache.hadoop.hdfs.server.blockmanagement.BlockReportLeaseManager",
+        "org.apache.hadoop.hdfs.server.namenode.FileJournalManager",
+        "org.apache.hadoop.hdfs.server.namenode.NNStorageRetentionManager",
+        "org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf",
+        "org.apache.hadoop.hdfs.server.namenode.FSEditLog",
         "org.apache.hadoop.hdfs.server.datanode.BlockPoolSliceScanner",
+        "org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.BlockPoolSlice",
         "org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl",
         
"org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetAsyncDiskService",
+        "org.apache.hadoop.hdfs.server.datanode.fsdataset.impl" +
+            ".RamDiskAsyncLazyPersistService",
     };
     for(String n : lognames) {
-      GenericTestUtils.disableLog(LogFactory.getLog(n));
+      GenericTestUtils.disableLog(LoggerFactory.getLogger(n));
     }
     
-    GenericTestUtils.disableLog(LogFactory.getLog(UserGroupInformation.class));
-    GenericTestUtils.disableLog(LogFactory.getLog(BlockManager.class));
-    GenericTestUtils.disableLog(LogFactory.getLog(FSNamesystem.class));
-    GenericTestUtils.disableLog(LogFactory.getLog(DirectoryScanner.class));
-    GenericTestUtils.disableLog(LogFactory.getLog(MetricsSystemImpl.class));
-    
+    GenericTestUtils.disableLog(LoggerFactory.getLogger(
+        UserGroupInformation.class));
+    GenericTestUtils.disableLog(LoggerFactory.getLogger(BlockManager.class));
+    GenericTestUtils.disableLog(LoggerFactory.getLogger(FSNamesystem.class));
+    GenericTestUtils.disableLog(LoggerFactory.getLogger(
+        DirectoryScanner.class));
+    GenericTestUtils.disableLog(LoggerFactory.getLogger(
+        MetricsSystemImpl.class));
+
+    GenericTestUtils.disableLog(DatasetVolumeChecker.LOG);
+    GenericTestUtils.disableLog(DatanodeDescriptor.LOG);
+    GenericTestUtils.disableLog(GSet.LOG);
+    GenericTestUtils.disableLog(TopMetrics.LOG);
+    GenericTestUtils.disableLog(HttpRequestLog.LOG);
+    GenericTestUtils.disableLog(ThrottledAsyncChecker.LOG);
+    GenericTestUtils.disableLog(VolumeScanner.LOG);
     GenericTestUtils.disableLog(BlockScanner.LOG);
     GenericTestUtils.disableLog(HttpServer2.LOG);
     GenericTestUtils.disableLog(DataNode.LOG);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ba0da278/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java
index 770651e..d36e3b6 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java
@@ -17,22 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.namenode.snapshot;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertSame;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import static org.mockito.Matchers.anyBoolean;
-import static org.mockito.Matchers.anyObject;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.spy;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Random;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -41,12 +25,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.DFSOutputStream;
-import org.apache.hadoop.hdfs.DFSTestUtil;
-import org.apache.hadoop.hdfs.DFSUtil;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.*;
 import org.apache.hadoop.hdfs.client.HdfsDataOutputStream.SyncFlag;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
@@ -55,18 +34,10 @@ import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffReportEntry;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffType;
 import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
-import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
-import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
-import org.apache.hadoop.hdfs.server.namenode.INode;
-import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
-import org.apache.hadoop.hdfs.server.namenode.INodeFile;
-import org.apache.hadoop.hdfs.server.namenode.INodeReference;
+import org.apache.hadoop.hdfs.server.namenode.*;
 import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithCount;
-import org.apache.hadoop.hdfs.server.namenode.INodesInPath;
-import org.apache.hadoop.hdfs.server.namenode.QuotaCounts;
 import 
org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.ChildrenDiff;
 import 
org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff;
-import org.apache.hadoop.hdfs.util.Diff.ListType;
 import org.apache.hadoop.hdfs.util.ReadOnlyList;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
@@ -76,6 +47,18 @@ import org.junit.Test;
 import org.mockito.Mockito;
 import org.mockito.internal.util.reflection.Whitebox;
 
+import java.io.File;
+import java.io.IOException;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Random;
+
+import static org.junit.Assert.*;
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.spy;
+
 /** Testing rename with snapshots. */
 public class TestRenameWithSnapshots {
   static {
@@ -104,7 +87,11 @@ public class TestRenameWithSnapshots {
   static private final String snap1 = "snap1";
   static private final String snap2 = "snap2";
 
-  
+  static void assertSizes(int createdSize, int deletedSize, ChildrenDiff diff) 
{
+    assertEquals(createdSize, diff.getCreatedUnmodifiable().size());
+    assertEquals(deletedSize, diff.getDeletedUnmodifiable().size());
+  }
+
   @Before
   public void setUp() throws Exception {
     conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCKSIZE);
@@ -1301,9 +1288,8 @@ public class TestRenameWithSnapshots {
     // after the undo of rename, both the created and deleted list of sdir1
     // should be empty
     ChildrenDiff childrenDiff = dir1Diffs.get(0).getChildrenDiff();
-    assertEquals(0, childrenDiff.getList(ListType.DELETED).size());
-    assertEquals(0, childrenDiff.getList(ListType.CREATED).size());
-    
+    assertSizes(0, 0, childrenDiff);
+
     INode fooNode = fsdir.getINode4Write(foo.toString());
     assertTrue(fooNode.isDirectory() && 
fooNode.asDirectory().isWithSnapshot());
     DiffList<DirectoryDiff> fooDiffs =
@@ -1372,13 +1358,12 @@ public class TestRenameWithSnapshots {
     // after the undo of rename, the created list of sdir1 should contain 
     // 1 element
     ChildrenDiff childrenDiff = dir1Diffs.get(0).getChildrenDiff();
-    assertEquals(0, childrenDiff.getList(ListType.DELETED).size());
-    assertEquals(1, childrenDiff.getList(ListType.CREATED).size());
-    
+    assertSizes(1, 0, childrenDiff);
+
     INode fooNode = fsdir.getINode4Write(foo.toString());
     assertTrue(fooNode instanceof INodeDirectory);
-    assertTrue(childrenDiff.getList(ListType.CREATED).get(0) == fooNode);
-    
+    assertTrue(childrenDiff.getCreatedUnmodifiable().get(0) == fooNode);
+
     final Path foo_s1 = SnapshotTestHelper.getSnapshotPath(sdir1, "s1", "foo");
     assertFalse(hdfs.exists(foo_s1));
     
@@ -1438,13 +1423,12 @@ public class TestRenameWithSnapshots {
     assertEquals(1, dir2Diffs.size());
     assertEquals(s2.getId(), dir2Diffs.get(0).getSnapshotId());
     ChildrenDiff childrenDiff = dir2Diffs.get(0).getChildrenDiff();
-    assertEquals(0, childrenDiff.getList(ListType.DELETED).size());
-    assertEquals(1, childrenDiff.getList(ListType.CREATED).size());
+    assertSizes(1, 0, childrenDiff);
     final Path foo_s2 = SnapshotTestHelper.getSnapshotPath(sdir2, "s2", 
"foo2");
     assertFalse(hdfs.exists(foo_s2));
     
     INode fooNode = fsdir.getINode4Write(foo_dir2.toString());
-    assertTrue(childrenDiff.getList(ListType.CREATED).get(0) == fooNode);
+    assertTrue(childrenDiff.getCreatedUnmodifiable().get(0) == fooNode);
     assertTrue(fooNode instanceof INodeReference.DstReference);
     DiffList<DirectoryDiff> fooDiffs =
         fooNode.asDirectory().getDiffs().asList();
@@ -1468,14 +1452,12 @@ public class TestRenameWithSnapshots {
     assertEquals(s3.getId(), dir2Diffs.get(1).getSnapshotId());
     
     childrenDiff = dir2Diffs.get(0).getChildrenDiff();
-    assertEquals(0, childrenDiff.getList(ListType.DELETED).size());
-    assertEquals(1, childrenDiff.getList(ListType.CREATED).size());
-    assertTrue(childrenDiff.getList(ListType.CREATED).get(0) == fooNode);
+    assertSizes(1, 0, childrenDiff);
+    assertTrue(childrenDiff.getCreatedUnmodifiable().get(0) == fooNode);
     
     childrenDiff = dir2Diffs.get(1).getChildrenDiff();
-    assertEquals(0, childrenDiff.getList(ListType.DELETED).size());
-    assertEquals(0, childrenDiff.getList(ListType.CREATED).size());
-    
+    assertSizes(0, 0, childrenDiff);
+
     final Path foo_s3 = SnapshotTestHelper.getSnapshotPath(sdir2, "s3", 
"foo2");
     assertFalse(hdfs.exists(foo_s2));
     assertTrue(hdfs.exists(foo_s3));
@@ -1600,9 +1582,8 @@ public class TestRenameWithSnapshots {
         .getDiffs().asList();
     assertEquals(1, diffList.size());
     DirectoryDiff diff = diffList.get(0);
-    assertTrue(diff.getChildrenDiff().getList(ListType.CREATED).isEmpty());
-    assertTrue(diff.getChildrenDiff().getList(ListType.DELETED).isEmpty());
-    
+    assertSizes(0, 0, diff.getChildrenDiff());
+
     // check dir2
     INodeDirectory dir2Node = 
fsdir2.getINode4Write(dir2.toString()).asDirectory();
     assertTrue(dir2Node.isSnapshottable());
@@ -1618,8 +1599,7 @@ public class TestRenameWithSnapshots {
     diffList = dir2Node.getDiffs().asList();
     assertEquals(1, diffList.size());
     diff = diffList.get(0);
-    assertTrue(diff.getChildrenDiff().getList(ListType.CREATED).isEmpty());
-    assertTrue(diff.getChildrenDiff().getList(ListType.DELETED).isEmpty());
+    assertSizes(0, 0, diff.getChildrenDiff());
   }
   
   /**
@@ -1674,9 +1654,8 @@ public class TestRenameWithSnapshots {
         .getDiffs().asList();
     assertEquals(1, diffList.size());
     DirectoryDiff diff = diffList.get(0);
-    assertTrue(diff.getChildrenDiff().getList(ListType.CREATED).isEmpty());
-    assertTrue(diff.getChildrenDiff().getList(ListType.DELETED).isEmpty());
-    
+    assertSizes(0, 0, diff.getChildrenDiff());
+
     // check dir2
     INodeDirectory dir2Node = 
fsdir2.getINode4Write(dir2.toString()).asDirectory();
     assertTrue(dir2Node.isSnapshottable());
@@ -1696,8 +1675,7 @@ public class TestRenameWithSnapshots {
     diffList = (  dir2Node).getDiffs().asList();
     assertEquals(1, diffList.size());
     diff = diffList.get(0);
-    assertTrue(diff.getChildrenDiff().getList(ListType.CREATED).isEmpty());
-    assertTrue(diff.getChildrenDiff().getList(ListType.DELETED).isEmpty());
+    assertSizes(0, 0, diff.getChildrenDiff());
   }
   
   /**
@@ -1737,9 +1715,8 @@ public class TestRenameWithSnapshots {
     Snapshot s1 = rootNode.getSnapshot(DFSUtil.string2Bytes(snap1));
     assertEquals(s1.getId(), diff.getSnapshotId());
     // after undo, the diff should be empty
-    assertTrue(diff.getChildrenDiff().getList(ListType.DELETED).isEmpty());
-    assertTrue(diff.getChildrenDiff().getList(ListType.CREATED).isEmpty());
-    
+    assertSizes(0, 0, diff.getChildrenDiff());
+
     // bar was converted to filewithsnapshot while renaming
     INodeFile barNode = fsdir.getINode4Write(bar.toString()).asFile();
     assertSame(barNode, children.get(0));
@@ -1989,9 +1966,8 @@ public class TestRenameWithSnapshots {
     Snapshot s1 = dir1Node.getSnapshot(DFSUtil.string2Bytes("s1"));
     assertEquals(s1.getId(), diffList.get(0).getSnapshotId());
     ChildrenDiff diff = diffList.get(0).getChildrenDiff();
-    assertEquals(0, diff.getList(ListType.CREATED).size());
-    assertEquals(0, diff.getList(ListType.DELETED).size());
-    
+    assertSizes(0, 0, diff);
+
     restartClusterAndCheckImage(true);
   }
   
@@ -2062,9 +2038,8 @@ public class TestRenameWithSnapshots {
     assertEquals(s1.getId(), diffList.get(0).getSnapshotId());
     ChildrenDiff diff = diffList.get(0).getChildrenDiff();
     // bar2 and bar3 in the created list
-    assertEquals(2, diff.getList(ListType.CREATED).size());
-    assertEquals(0, diff.getList(ListType.DELETED).size());
-    
+    assertSizes(2, 0, diff);
+
     final INode fooRef2 = fsdir.getINode4Write(foo.toString());
     assertTrue(fooRef2 instanceof INodeReference.DstReference);
     INodeReference.WithCount wc2 = 
@@ -2235,13 +2210,9 @@ public class TestRenameWithSnapshots {
         .asDirectory();
     DiffList<DirectoryDiff> dir1DiffList = dir1Node.getDiffs().asList();
     assertEquals(1, dir1DiffList.size());
-    List<INode> dList = dir1DiffList.get(0).getChildrenDiff()
-        .getList(ListType.DELETED);
-    assertTrue(dList.isEmpty());
-    List<INode> cList = dir1DiffList.get(0).getChildrenDiff()
-        .getList(ListType.CREATED);
-    assertEquals(1, cList.size());
-    INode cNode = cList.get(0);
+    final ChildrenDiff childrenDiff = dir1DiffList.get(0).getChildrenDiff();
+    assertSizes(1, 0, childrenDiff);
+    INode cNode = childrenDiff.getCreatedUnmodifiable().get(0);
     INode fooNode = fsdir.getINode4Write(newfoo.toString());
     assertSame(cNode, fooNode);
     
@@ -2259,7 +2230,7 @@ public class TestRenameWithSnapshots {
     Snapshot s0 = testNode.getSnapshot(DFSUtil.string2Bytes("s0"));
     assertEquals(s0.getId(), diff.getSnapshotId());
     // and file should be stored in the deleted list of this snapshot diff
-    assertEquals("file", diff.getChildrenDiff().getList(ListType.DELETED)
+    assertEquals("file", diff.getChildrenDiff().getDeletedUnmodifiable()
         .get(0).getLocalName());
     
     // check dir2: a WithName instance for foo should be in the deleted list
@@ -2269,7 +2240,8 @@ public class TestRenameWithSnapshots {
     DiffList<DirectoryDiff> dir2DiffList = dir2Node.getDiffs().asList();
     // dir2Node should contain 1 snapshot diffs for s2
     assertEquals(1, dir2DiffList.size());
-    dList = dir2DiffList.get(0).getChildrenDiff().getList(ListType.DELETED);
+    final List<INode> dList = dir2DiffList.get(0).getChildrenDiff()
+        .getDeletedUnmodifiable();
     assertEquals(1, dList.size());
     final Path foo_s2 = SnapshotTestHelper.getSnapshotPath(dir2, "s2", 
         foo.getName());
@@ -2323,8 +2295,7 @@ public class TestRenameWithSnapshots {
     DiffList<DirectoryDiff> diffList = barNode.getDiffs().asList();
     assertEquals(1, diffList.size());
     DirectoryDiff diff = diffList.get(0);
-    assertEquals(0, diff.getChildrenDiff().getList(ListType.DELETED).size());
-    assertEquals(0, diff.getChildrenDiff().getList(ListType.CREATED).size());
+    assertSizes(0, 0, diff.getChildrenDiff());
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ba0da278/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSetQuotaWithSnapshot.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSetQuotaWithSnapshot.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSetQuotaWithSnapshot.java
index 2fecbb1..2459b78 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSetQuotaWithSnapshot.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSetQuotaWithSnapshot.java
@@ -38,7 +38,6 @@ import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.INode;
 import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
 import 
org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff;
-import org.apache.hadoop.hdfs.util.Diff.ListType;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -153,8 +152,9 @@ public class TestSetQuotaWithSnapshot {
         subNode.asDirectory().getDiffs().asList();
     assertEquals(1, diffList.size());
     Snapshot s2 = dirNode.getSnapshot(DFSUtil.string2Bytes("s2"));
-    assertEquals(s2.getId(), diffList.get(0).getSnapshotId());
-    List<INode> createdList = 
diffList.get(0).getChildrenDiff().getList(ListType.CREATED);
+    final DirectoryDiff diff = diffList.get(0);
+    assertEquals(s2.getId(), diff.getSnapshotId());
+    List<INode> createdList = diff.getChildrenDiff().getCreatedUnmodifiable();
     assertEquals(1, createdList.size());
     assertSame(fsdir.getINode4Write(file.toString()), createdList.get(0));
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to