MAPREDUCE-6682. TestMRCJCFileOutputCommitter fails intermittently. 
Contributed by Akira Ajisaka.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8f1c374b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8f1c374b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8f1c374b

Branch: refs/heads/YARN-2915
Commit: 8f1c374bec2451568f463ab68f7eb6db734ab14e
Parents: a1f6564
Author: Brahma Reddy Battula <bra...@apache.org>
Authored: Thu Aug 4 11:21:47 2016 +0530
Committer: Brahma Reddy Battula <bra...@apache.org>
Committed: Thu Aug 4 11:21:47 2016 +0530

----------------------------------------------------------------------
 .../mapred/TestMRCJCFileOutputCommitter.java    | 14 +++++---
 .../mapred/TestMRTimelineEventHandling.java     | 34 +++++++++++++-------
 2 files changed, 32 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8f1c374b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
----------------------------------------------------------------------
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
index 74b6d77..83796e8 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java
@@ -25,6 +25,8 @@ import org.apache.hadoop.fs.RawLocalFileSystem;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.JobStatus;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.junit.After;
 import org.junit.Test;
 
 import java.io.File;
@@ -37,8 +39,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
 public class TestMRCJCFileOutputCommitter {
-  private static Path outDir = new Path(
-     System.getProperty("test.build.data", "/tmp"), "output");
+  private static Path outDir = new 
Path(GenericTestUtils.getTempPath("output"));
 
   // A random task attempt id for testing.
   private static String attempt = "attempt_200707121733_0001_m_000000_0";
@@ -112,12 +113,11 @@ public class TestMRCJCFileOutputCommitter {
     expectedOutput.append(key2).append('\t').append(val2).append("\n");
     String output = UtilsForTests.slurp(expectedFile);
     assertEquals(output, expectedOutput.toString());
-
-    FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
   @Test
   public void testAbort() throws IOException {
+    FileUtil.fullyDelete(new File(outDir.toString()));
     JobConf job = new JobConf();
     setConfForFileOutputCommitter(job);
     JobContext jContext = new JobContextImpl(job, taskID.getJobID());
@@ -152,7 +152,6 @@ public class TestMRCJCFileOutputCommitter {
     assertFalse("job temp dir "+expectedFile+" still exists", 
expectedFile.exists());
     assertEquals("Output directory not empty", 0, new File(outDir.toString())
         .listFiles().length);
-    FileUtil.fullyDelete(new File(outDir.toString()));
   }
 
   public static class FakeFileSystem extends RawLocalFileSystem {
@@ -223,4 +222,9 @@ public class TestMRCJCFileOutputCommitter {
     assertTrue(th.getMessage().contains("fake delete failed"));
     assertTrue("job temp dir does not exists", jobTmpDir.exists());
   }
+
+  @After
+  public void teardown() {
+    FileUtil.fullyDelete(new File(outDir.toString()));
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8f1c374b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
----------------------------------------------------------------------
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
index 53ad6bc..cbca3c8 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
@@ -34,12 +34,14 @@ import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.jobhistory.EventType;
 import org.apache.hadoop.mapreduce.jobhistory.TestJobHistoryEventHandler;
 import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
@@ -460,21 +462,21 @@ public class TestMRTimelineEventHandling {
     conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
     conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
     MiniMRYarnCluster cluster = null;
+    FileSystem fs = null;
+    Path inDir = new Path(GenericTestUtils.getTempPath("input"));
+    Path outDir = new Path(GenericTestUtils.getTempPath("output"));
     try {
+      fs = FileSystem.get(conf);
       cluster = new MiniMRYarnCluster(
         TestMRTimelineEventHandling.class.getSimpleName(), 1);
       cluster.init(conf);
       cluster.start();
       conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
           MiniYARNCluster.getHostname() + ":"
-          + cluster.getApplicationHistoryServer().getPort());
+              + cluster.getApplicationHistoryServer().getPort());
       TimelineStore ts = cluster.getApplicationHistoryServer()
           .getTimelineStore();
 
-      String localPathRoot = System.getProperty("test.build.data",
-          "build/test/data");
-      Path inDir = new Path(localPathRoot, "input");
-      Path outDir = new Path(localPathRoot, "output");
       RunningJob job =
           UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
       Assert.assertEquals(JobStatus.SUCCEEDED,
@@ -496,6 +498,7 @@ public class TestMRTimelineEventHandling {
       if (cluster != null) {
         cluster.stop();
       }
+      deletePaths(fs, inDir, outDir);
     }
 
     conf = new YarnConfiguration();
@@ -509,15 +512,10 @@ public class TestMRTimelineEventHandling {
       cluster.start();
       conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
           MiniYARNCluster.getHostname() + ":"
-          + cluster.getApplicationHistoryServer().getPort());
+              + cluster.getApplicationHistoryServer().getPort());
       TimelineStore ts = cluster.getApplicationHistoryServer()
           .getTimelineStore();
 
-      String localPathRoot = System.getProperty("test.build.data",
-          "build/test/data");
-      Path inDir = new Path(localPathRoot, "input");
-      Path outDir = new Path(localPathRoot, "output");
-
       conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_EMIT_TIMELINE_DATA, false);
       RunningJob job =
           UtilsForTests.runJobSucceed(new JobConf(conf), inDir, outDir);
@@ -540,6 +538,20 @@ public class TestMRTimelineEventHandling {
       if (cluster != null) {
         cluster.stop();
       }
+      deletePaths(fs, inDir, outDir);
+    }
+  }
+
+  /** Delete input paths recursively. Paths should not be null. */
+  private void deletePaths(FileSystem fs, Path... paths) {
+    if (fs == null) {
+      return;
+    }
+    for (Path path : paths) {
+      try {
+        fs.delete(path, true);
+      } catch (Exception ignored) {
+      }
     }
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to