Author: cnauroth
Date: Mon Apr 14 04:50:14 2014
New Revision: 1587150

URL: http://svn.apache.org/r1587150
Log:
HDFS-6238. Merging change r1587148 from trunk to branch-2.

Modified:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1587150&r1=1587149&r2=1587150&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Mon Apr 14 04:50:14 2014
@@ -91,6 +91,8 @@ Release 2.5.0 - UNRELEASED
     HDFS-6237. TestDFSShell#testGet fails on Windows due to invalid file system
     path. (cnauroth)
 
+    HDFS-6238. TestDirectoryScanner leaks file descriptors. (cnauroth)
+
 Release 2.4.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java?rev=1587150&r1=1587149&r2=1587150&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java Mon Apr 14 04:50:14 2014
@@ -46,6 +46,7 @@ import org.apache.hadoop.hdfs.server.com
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetTestUtil;
+import org.apache.hadoop.io.IOUtils;
 import org.junit.Test;
 
 /**
@@ -85,11 +86,17 @@ public class TestDirectoryScanner {
         File mf = b.getMetaFile();
         // Truncate a block file that has a corresponding metadata file
         if (f.exists() && f.length() != 0 && mf.exists()) {
-          FileOutputStream s = new FileOutputStream(f);
-          FileChannel channel = s.getChannel();
-          channel.truncate(0);
-          LOG.info("Truncated block file " + f.getAbsolutePath());
-          return b.getBlockId();
+          FileOutputStream s = null;
+          FileChannel channel = null;
+          try {
+            s = new FileOutputStream(f);
+            channel = s.getChannel();
+            channel.truncate(0);
+            LOG.info("Truncated block file " + f.getAbsolutePath());
+            return b.getBlockId();
+          } finally {
+            IOUtils.cleanup(LOG, channel, s);
+          }
         }
       }
     }
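
For context, the pattern committed above (closing the channel and stream in a
finally block via IOUtils.cleanup) can also be expressed with Java 7
try-with-resources, which closes both resources automatically even when
truncate() throws. The following is only an illustrative sketch, not part of
the committed change; the surrounding names (LOG, the block object b, and the
block file f) are assumed from the test context shown in the hunk above.

    // Illustrative sketch only: equivalent cleanup with try-with-resources.
    // Resources are closed in reverse declaration order on exit, so no file
    // descriptor is leaked if truncate(0) or the logging call fails.
    try (FileOutputStream s = new FileOutputStream(f);
         FileChannel channel = s.getChannel()) {
      channel.truncate(0);
      LOG.info("Truncated block file " + f.getAbsolutePath());
      return b.getBlockId();
    }

IOUtils.cleanup was used in the patch because it logs and swallows any
IOException raised while closing, which keeps the test's failure reporting
focused on the original exception.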

