HDFS-13054. Handling PathIsNotEmptyDirectoryException in DFSClient delete call. Contributed by Nanda kumar.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e990904d Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e990904d Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e990904d Branch: refs/heads/YARN-1011 Commit: e990904dd568a1d8f98efb55c1dd2d598ae4752b Parents: a37e7f0 Author: Arpit Agarwal <[email protected]> Authored: Fri Jan 26 11:42:27 2018 -0800 Committer: Arpit Agarwal <[email protected]> Committed: Fri Jan 26 13:09:13 2018 -0800 ---------------------------------------------------------------------- .../java/org/apache/hadoop/hdfs/DFSClient.java | 4 +++- .../hadoop/hdfs/protocol/ClientProtocol.java | 3 +++ .../hadoop/hdfs/TestDistributedFileSystem.java | 17 +++++++++++++++++ 3 files changed, 23 insertions(+), 1 deletion(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hadoop/blob/e990904d/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java index f0769c1..92bb99e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java @@ -82,6 +82,7 @@ import org.apache.hadoop.fs.Options; import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.fs.ParentNotDirectoryException; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; import org.apache.hadoop.fs.QuotaUsage; import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.fs.StorageType; @@ -1620,7 +1621,8 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory, FileNotFoundException.class, SafeModeException.class, UnresolvedPathException.class, - SnapshotAccessControlException.class); + SnapshotAccessControlException.class, + PathIsNotEmptyDirectoryException.class); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/e990904d/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java index fbef037..0d77037 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java @@ -26,6 +26,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.crypto.CryptoProtocolVersion; import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries; +import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; import org.apache.hadoop.hdfs.AddBlockFlag; import org.apache.hadoop.fs.CacheFlag; import org.apache.hadoop.fs.ContentSummary; @@ -625,6 +626,8 @@ public interface ClientProtocol { * @throws org.apache.hadoop.fs.UnresolvedLinkException If <code>src</code> * contains a symlink * @throws SnapshotAccessControlException if path is in RO snapshot + * @throws PathIsNotEmptyDirectoryException if path is a non-empty directory + * and <code>recursive</code> is set to false * @throws IOException If an I/O error occurred */ @AtMostOnce http://git-wip-us.apache.org/repos/asf/hadoop/blob/e990904d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java 
---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java index 823c747..072ee9f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java @@ -67,6 +67,7 @@ import org.apache.hadoop.fs.LocatedFileStatus; import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum; import org.apache.hadoop.fs.Options.ChecksumOpt; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException; import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.fs.StorageStatistics.LongStatistic; import org.apache.hadoop.fs.StorageType; @@ -571,6 +572,22 @@ public class TestDistributedFileSystem { in.close(); fs.close(); } + + { + // Test PathIsNotEmptyDirectoryException while deleting non-empty dir + FileSystem fs = cluster.getFileSystem(); + fs.mkdirs(new Path("/test/nonEmptyDir")); + fs.create(new Path("/test/nonEmptyDir/emptyFile")).close(); + try { + fs.delete(new Path("/test/nonEmptyDir"), false); + Assert.fail("Expecting PathIsNotEmptyDirectoryException"); + } catch (PathIsNotEmptyDirectoryException ex) { + // This is the proper exception to catch; move on. + } + Assert.assertTrue(fs.exists(new Path("/test/nonEmptyDir"))); + fs.delete(new Path("/test/nonEmptyDir"), true); + } + } finally { if (cluster != null) {cluster.shutdown();} --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
