This is an automated email from the ASF dual-hosted git repository.

ayushsaxena pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-3.4 by this push:
     new 82f6d1184c8b MAPREDUCE-7475. Fix non-idempotent unit tests (#6785) (#6837)
82f6d1184c8b is described below

commit 82f6d1184c8b7f6d1278e9ecd59cd0ce467254ba
Author: Kaiyao Ke <47203510+kaiya...@users.noreply.github.com>
AuthorDate: Sun May 19 15:32:54 2024 -0500

    MAPREDUCE-7475. Fix non-idempotent unit tests (#6785) (#6837)
    
    Contributed by Kaiyao Ke
---
 .../mapreduce/v2/app/webapp/TestAppController.java |  2 ++
 .../java/org/apache/hadoop/mapred/TestMapTask.java | 18 ++++++++++++-----
 .../hadoop/mapred/TestTaskProgressReporter.java    |  6 ++++++
 .../apache/hadoop/mapred/NotificationTestCase.java |  2 ++
 .../hadoop/mapred/TestOldCombinerGrouping.java     | 23 ++++++++++++++--------
 .../hadoop/mapreduce/TestNewCombinerGrouping.java  | 23 ++++++++++++++--------
 6 files changed, 53 insertions(+), 21 deletions(-)
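
Each fix follows the same idea: static state that a test mutates (counters, test
directories on disk) is reset in a @Before/@After method, so the test still passes
when it is run again in the same JVM. A minimal JUnit 4 sketch of that pattern is
shown below; the class and field names are illustrative and not taken from the patch:

    import java.io.File;

    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.test.GenericTestUtils;
    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;
    import static org.junit.Assert.assertEquals;
    import static org.junit.Assert.assertTrue;

    // Illustrative example only, not part of this commit.
    public class TestIdempotentExample {
      // Static state survives across test invocations in the same JVM,
      // so it has to be reset explicitly before each run.
      private static int invocations = 0;
      private static File testRootDir = GenericTestUtils.getRandomizedTestDir();

      @Before
      public void setup() {
        invocations = 0;                      // reset mutable static state
        if (!testRootDir.exists()) {
          assertTrue(testRootDir.mkdirs());   // recreate dir deleted by a previous run
        }
      }

      @After
      public void cleanup() {
        FileUtil.fullyDelete(testRootDir);    // remove on-disk leftovers
      }

      @Test
      public void testRunsTwice() {
        invocations++;
        assertEquals(1, invocations);         // holds on every run, not only the first
      }
    }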

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java
index ba5c43012146..473681c3e424 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java
@@ -319,6 +319,8 @@ public class TestAppController {
     appController.attempts();
 
     assertEquals(AttemptsPage.class, appController.getClazz());
+
+    appController.getProperty().remove(AMParams.ATTEMPT_STATE);
   }
 
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMapTask.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMapTask.java
index fef179994f09..771a5313ec32 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMapTask.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestMapTask.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.util.Progress;
 import org.junit.After;
+import org.junit.Before;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
@@ -47,14 +48,21 @@ import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.mock;
 
 public class TestMapTask {
-  private static File TEST_ROOT_DIR = new File(
+  private static File testRootDir = new File(
       System.getProperty("test.build.data",
           System.getProperty("java.io.tmpdir", "/tmp")),
       TestMapTask.class.getName());
 
+  @Before
+  public void setup() throws Exception {
+    if(!testRootDir.exists()) {
+      testRootDir.mkdirs();
+    }
+  }
+
   @After
   public void cleanup() throws Exception {
-    FileUtil.fullyDelete(TEST_ROOT_DIR);
+    FileUtil.fullyDelete(testRootDir);
   }
 
   @Rule
@@ -66,7 +74,7 @@ public class TestMapTask {
   public void testShufflePermissions() throws Exception {
     JobConf conf = new JobConf();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
-    conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath());
+    conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath());
     MapOutputFile mof = new MROutputFiles();
     mof.setConf(conf);
     TaskAttemptID attemptId = new TaskAttemptID("12345", 1, TaskType.MAP, 1, 1);
@@ -98,7 +106,7 @@ public class TestMapTask {
   public void testSpillFilesCountLimitInvalidValue() throws Exception {
     JobConf conf = new JobConf();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
-    conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath());
+    conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath());
     conf.setInt(MRJobConfig.SPILL_FILES_COUNT_LIMIT, -2);
     MapOutputFile mof = new MROutputFiles();
     mof.setConf(conf);
@@ -124,7 +132,7 @@ public class TestMapTask {
   public void testSpillFilesCountBreach() throws Exception {
     JobConf conf = new JobConf();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
-    conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath());
+    conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath());
     conf.setInt(MRJobConfig.SPILL_FILES_COUNT_LIMIT, 2);
     MapOutputFile mof = new MROutputFiles();
     mof.setConf(conf);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskProgressReporter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskProgressReporter.java
index 52875b7aca70..93602935c9af 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskProgressReporter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestTaskProgressReporter.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.checkpoint.TaskCheckpointID;
 import org.apache.hadoop.util.ExitUtil;
 import org.junit.After;
+import org.junit.Before;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -180,6 +181,11 @@ public class TestTaskProgressReporter {
     }
   }
 
+  @Before
+  public void setup() {
+    statusUpdateTimes = 0;
+  }
+
   @After
   public void cleanup() {
     FileSystem.clearStatistics();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
index 3372c8f28b6f..8acd015ab098 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
@@ -158,6 +158,8 @@ public abstract class NotificationTestCase extends HadoopTestCase {
   @After
   public void tearDown() throws Exception {
     stopHttpServer();
+    NotificationServlet.counter = 0;
+    NotificationServlet.failureCounter = 0;
     super.tearDown();
   }
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java
index 046c2d37eed9..1f6395dfb789 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestOldCombinerGrouping.java
@@ -18,11 +18,16 @@
 
 package org.apache.hadoop.mapred;
 
+import org.junit.After;
 import org.junit.Assert;
+
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.test.GenericTestUtils;
+
 import org.junit.Test;
 
 import java.io.BufferedReader;
@@ -34,12 +39,9 @@ import java.io.PrintWriter;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Set;
-import java.util.UUID;
 
 public class TestOldCombinerGrouping {
-  private static String TEST_ROOT_DIR = new File(System.getProperty(
-      "test.build.data", "build/test/data"), UUID.randomUUID().toString())
-          .getAbsolutePath();
+  private static File testRootDir = GenericTestUtils.getRandomizedTestDir();
 
   public static class Map implements
       Mapper<LongWritable, Text, Text, LongWritable> {
@@ -117,16 +119,21 @@ public class TestOldCombinerGrouping {
 
   }
 
+  @After
+  public void cleanup() {
+    FileUtil.fullyDelete(testRootDir);
+  }
+
   @Test
   public void testCombiner() throws Exception {
-    if (!new File(TEST_ROOT_DIR).mkdirs()) {
-      throw new RuntimeException("Could not create test dir: " + TEST_ROOT_DIR);
+    if (!testRootDir.mkdirs()) {
+      throw new RuntimeException("Could not create test dir: " + testRootDir);
     }
-    File in = new File(TEST_ROOT_DIR, "input");
+    File in = new File(testRootDir, "input");
     if (!in.mkdirs()) {
       throw new RuntimeException("Could not create test dir: " + in);
     }
-    File out = new File(TEST_ROOT_DIR, "output");
+    File out = new File(testRootDir, "output");
     PrintWriter pw = new PrintWriter(new FileWriter(new File(in, "data.txt")));
     pw.println("A|a,1");
     pw.println("A|b,2");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestNewCombinerGrouping.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestNewCombinerGrouping.java
index c2054f1d4c1e..df9c6c5e9c19 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestNewCombinerGrouping.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestNewCombinerGrouping.java
@@ -18,7 +18,10 @@
 
 package org.apache.hadoop.mapreduce;
 
+import org.junit.After;
 import org.junit.Assert;
+
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.RawComparator;
@@ -26,6 +29,8 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
 
 import java.io.BufferedReader;
@@ -36,12 +41,9 @@ import java.io.IOException;
 import java.io.PrintWriter;
 import java.util.HashSet;
 import java.util.Set;
-import java.util.UUID;
 
 public class TestNewCombinerGrouping {
-  private static String TEST_ROOT_DIR = new File(System.getProperty(
-      "test.build.data", "build/test/data"), UUID.randomUUID().toString())
-          .getAbsolutePath();
+  private static File testRootDir = GenericTestUtils.getRandomizedTestDir();
 
   public static class Map extends
       Mapper<LongWritable, Text, Text, LongWritable> {
@@ -103,16 +105,21 @@ public class TestNewCombinerGrouping {
 
   }
 
+  @After
+  public void cleanup() {
+    FileUtil.fullyDelete(testRootDir);
+  }
+
   @Test
   public void testCombiner() throws Exception {
-    if (!new File(TEST_ROOT_DIR).mkdirs()) {
-      throw new RuntimeException("Could not create test dir: " + TEST_ROOT_DIR);
+    if (!testRootDir.mkdirs()) {
+      throw new RuntimeException("Could not create test dir: " + testRootDir);
     }
-    File in = new File(TEST_ROOT_DIR, "input");
+    File in = new File(testRootDir, "input");
     if (!in.mkdirs()) {
       throw new RuntimeException("Could not create test dir: " + in);
     }
-    File out = new File(TEST_ROOT_DIR, "output");
+    File out = new File(testRootDir, "output");
     PrintWriter pw = new PrintWriter(new FileWriter(new File(in, "data.txt")));
     pw.println("A|a,1");
     pw.println("A|b,2");


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org
