This is an automated email from the ASF dual-hosted git repository.

weichiu pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/trunk by this push:
     new 0984bbd67ae HADOOP-19440. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-sls. (#7553)
0984bbd67ae is described below

commit 0984bbd67ae4941c485ee14531dc6880bcae29a4
Author: slfan1989 <55643692+slfan1...@users.noreply.github.com>
AuthorDate: Fri Apr 4 01:06:04 2025 +0800

    HADOOP-19440. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-sls. (#7553)
---
 .../apache/hadoop/yarn/sls/BaseSLSRunnerTest.java  | 24 ++----
 .../apache/hadoop/yarn/sls/TestDagAMSimulator.java |  4 +-
 .../yarn/sls/TestReservationSystemInvariants.java  | 27 ++++--
 .../hadoop/yarn/sls/TestSLSDagAMSimulator.java     | 30 ++++---
 .../hadoop/yarn/sls/TestSLSGenericSynth.java       | 30 ++++---
 .../org/apache/hadoop/yarn/sls/TestSLSRunner.java  | 37 ++++++---
 .../hadoop/yarn/sls/TestSLSStreamAMSynth.java      | 42 ++++++----
 .../hadoop/yarn/sls/TestSynthJobGeneration.java    | 44 +++++-----
 .../hadoop/yarn/sls/appmaster/TestAMSimulator.java | 91 ++++++++++----------
 .../yarn/sls/nodemanager/TestNMSimulator.java      | 96 +++++++++++-----------
 .../hadoop/yarn/sls/scheduler/TestTaskRunner.java  | 58 ++++++-------
 .../apache/hadoop/yarn/sls/utils/TestSLSUtils.java | 66 +++++++--------
 .../apache/hadoop/yarn/sls/web/TestSLSWebApp.java  | 22 ++---
 13 files changed, 311 insertions(+), 260 deletions(-)

diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/BaseSLSRunnerTest.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/BaseSLSRunnerTest.java
index 2c88e262c76..c16ff23f642 100644
--- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/BaseSLSRunnerTest.java
+++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/BaseSLSRunnerTest.java
@@ -22,14 +22,11 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.resourcemanager.monitor.invariants.MetricsInvariantChecker;
-import org.junit.After;
-import org.junit.Assert;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
 import org.junit.Assume;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
 
 import java.io.File;
 import java.util.ArrayList;
@@ -42,37 +39,32 @@
 /**
  * This is a base class to ease the implementation of SLS-based tests.
*/ -@RunWith(value = Parameterized.class) @NotThreadSafe @SuppressWarnings("VisibilityModifier") public abstract class BaseSLSRunnerTest { - @Parameter(value = 0) public String schedulerType; - @Parameter(value = 1) public String traceType; - @Parameter(value = 2) public String traceLocation; - @Parameter(value = 3) public String nodeFile; protected SLSRunner sls; protected String ongoingInvariantFile; protected String exitInvariantFile; - @BeforeClass + @BeforeAll public static void checkForJavaScript() { Assume.assumeNotNull("JavaScript engine not available (JEP 372)", new ScriptEngineManager().getEngineByName("JavaScript")); } - @Before + @BeforeEach public abstract void setup(); - @After + @AfterEach public void tearDown() throws InterruptedException { if (sls != null) { sls.stop(); @@ -136,7 +128,7 @@ public void uncaughtException(Thread t, Throwable e) { if (!exceptionList.isEmpty()) { sls.stop(); - Assert.fail("TestSLSRunner caught exception from child thread " + Assertions.fail("TestSLSRunner caught exception from child thread " + "(TaskRunner.TaskDefinition): " + exceptionList); break; } diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestDagAMSimulator.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestDagAMSimulator.java index e458b860e4d..3dbb492682f 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestDagAMSimulator.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestDagAMSimulator.java @@ -20,12 +20,12 @@ import org.apache.hadoop.yarn.sls.appmaster.DAGAMSimulator; import org.apache.hadoop.yarn.sls.scheduler.ContainerSimulator; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.List; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestReservationSystemInvariants.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestReservationSystemInvariants.java index 22e1e2e729e..cc017dc6a7a 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestReservationSystemInvariants.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestReservationSystemInvariants.java @@ -27,10 +27,9 @@ import org.apache.hadoop.yarn.server.resourcemanager.monitor.invariants.ReservationInvariantsChecker; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import net.jcip.annotations.NotThreadSafe; @@ -38,11 +37,9 @@ * This test performs an SLS run enabling a * {@code ReservationInvariantsChecker}. 
*/ -@RunWith(value = Parameterized.class) @NotThreadSafe public class TestReservationSystemInvariants extends BaseSLSRunnerTest { - @Parameters(name = "Testing with: {1}, {0}, (nodeFile {3})") public static Collection<Object[]> data() { // Test with both schedulers, and all three trace types return Arrays.asList(new Object[][] { @@ -53,10 +50,22 @@ public static Collection<Object[]> data() { }); } - @Test(timeout = 120000) - @SuppressWarnings("all") - public void testSimulatorRunning() throws Exception { + public void initTestReservationSystemInvariants(String pSchedulerType, + String pTraceType, String pTraceLocation, String pNodeFile) { + this.schedulerType = pSchedulerType; + this.traceType = pTraceType; + this.traceLocation = pTraceLocation; + this.nodeFile = pNodeFile; + setup(); + } + @ParameterizedTest(name = "Testing with: {1}, {0}, (nodeFile {3})") + @MethodSource("data") + @Timeout(value = 120) + @SuppressWarnings("all") + public void testSimulatorRunning(String pSchedulerType, + String pTraceType, String pTraceLocation, String pNodeFile) throws Exception { + initTestReservationSystemInvariants(pSchedulerType, pTraceType, pTraceLocation, pNodeFile); Configuration conf = new Configuration(false); conf.set(YarnConfiguration.RM_SCHEDULER, schedulerType); conf.setBoolean(YarnConfiguration.RM_SCHEDULER_ENABLE_MONITORS, true); diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSDagAMSimulator.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSDagAMSimulator.java index 54158c0083c..f8ae802ea94 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSDagAMSimulator.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSDagAMSimulator.java @@ -22,11 +22,10 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import java.util.Arrays; import java.util.Collection; @@ -34,11 +33,9 @@ /** * This test performs simple runs of the SLS with the generic syn json format. 
*/ -@RunWith(value = Parameterized.class) @NotThreadSafe public class TestSLSDagAMSimulator extends BaseSLSRunnerTest { - @Parameters(name = "Testing with: {1}, {0}, (nodeFile {3})") public static Collection<Object[]> data() { String capScheduler = CapacityScheduler.class.getCanonicalName(); @@ -60,15 +57,28 @@ public static Collection<Object[]> data() { }); } - @Before + public void initTestSLSDagAMSimulator(String pSchedulerType, + String pTraceType, String pTraceLocation, String pNodeFile) { + this.schedulerType = pSchedulerType; + this.traceType = pTraceType; + this.traceLocation = pTraceLocation; + this.nodeFile = pNodeFile; + setup(); + } + + @BeforeEach public void setup() { ongoingInvariantFile = "src/test/resources/ongoing-invariants.txt"; exitInvariantFile = "src/test/resources/exit-invariants.txt"; } - @Test(timeout = 90000) + @ParameterizedTest(name = "Testing with: {1}, {0}, (nodeFile {3})") + @Timeout(value = 90) + @MethodSource("data") @SuppressWarnings("all") - public void testSimulatorRunning() throws Exception { + public void testSimulatorRunning(String pSchedulerType, + String pTraceType, String pTraceLocation, String pNodeFile) throws Exception { + initTestSLSDagAMSimulator(pSchedulerType, pTraceType, pTraceLocation, pNodeFile); Configuration conf = new Configuration(false); long timeTillShutdownInsec = 20L; runSLS(conf, timeTillShutdownInsec); diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSGenericSynth.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSGenericSynth.java index 79ebe219bfc..f72d32194b0 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSGenericSynth.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSGenericSynth.java @@ -22,11 +22,10 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import java.util.Arrays; import java.util.Collection; @@ -34,11 +33,9 @@ /** * This test performs simple runs of the SLS with the generic syn json format. 
*/ -@RunWith(value = Parameterized.class) @NotThreadSafe public class TestSLSGenericSynth extends BaseSLSRunnerTest { - @Parameters(name = "Testing with: {1}, {0}, (nodeFile {3})") public static Collection<Object[]> data() { String capScheduler = CapacityScheduler.class.getCanonicalName(); @@ -60,15 +57,28 @@ public static Collection<Object[]> data() { }); } - @Before + @BeforeEach public void setup() { ongoingInvariantFile = "src/test/resources/ongoing-invariants.txt"; exitInvariantFile = "src/test/resources/exit-invariants.txt"; } - @Test(timeout = 90000) + public void initTestSLSGenericSynth(String pSchedulerType, + String pTraceType, String pTraceLocation, String pNodeFile) { + this.schedulerType = pSchedulerType; + this.traceType = pTraceType; + this.traceLocation = pTraceLocation; + this.nodeFile = pNodeFile; + setup(); + } + + @ParameterizedTest(name = "Testing with: {1}, {0}, (nodeFile {3})") + @Timeout(value = 90) @SuppressWarnings("all") - public void testSimulatorRunning() throws Exception { + @MethodSource("data") + public void testSimulatorRunning(String pSchedulerType, + String pTraceType, String pTraceLocation, String pNodeFile) throws Exception { + initTestSLSGenericSynth(pSchedulerType, pTraceType, pTraceLocation, pNodeFile); Configuration conf = new Configuration(false); long timeTillShutdownInsec = 20L; runSLS(conf, timeTillShutdownInsec); diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSRunner.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSRunner.java index 2463ccf06a9..e8610293be2 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSRunner.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSRunner.java @@ -23,26 +23,22 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler; import org.apache.hadoop.yarn.sls.conf.SLSConfiguration; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.*; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import java.security.Security; import java.util.*; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * This test performs simple runs of the SLS with different trace types and * schedulers. 
*/ -@RunWith(value = Parameterized.class) @NotThreadSafe public class TestSLSRunner extends BaseSLSRunnerTest { - @Parameters(name = "Testing with: {1}, {0}, (nodeFile {3})") public static Collection<Object[]> data() { String capScheduler = CapacityScheduler.class.getCanonicalName(); @@ -76,15 +72,27 @@ public static Collection<Object[]> data() { }); } - @Before public void setup() { ongoingInvariantFile = "src/test/resources/ongoing-invariants.txt"; exitInvariantFile = "src/test/resources/exit-invariants.txt"; } - @Test(timeout = 90000) + public void initTestSLSRunner(String pSchedulerType, + String pTraceType, String pTraceLocation, String pNodeFile) { + this.schedulerType = pSchedulerType; + this.traceType = pTraceType; + this.traceLocation = pTraceLocation; + this.nodeFile = pNodeFile; + setup(); + } + + @ParameterizedTest(name = "Testing with: {1}, {0}, (nodeFile {3})") + @Timeout(value = 90) + @MethodSource("data") @SuppressWarnings("all") - public void testSimulatorRunning() throws Exception { + public void testSimulatorRunning(String pSchedulerType, + String pTraceType, String pTraceLocation, String pNodeFile) throws Exception { + initTestSLSRunner(pSchedulerType, pTraceType, pTraceLocation, pNodeFile); Configuration conf = new Configuration(false); long timeTillShutdownInsec = 20L; runSLS(conf, timeTillShutdownInsec); @@ -93,8 +101,11 @@ public void testSimulatorRunning() throws Exception { /** * Test to check whether caching is enabled based on config. */ - @Test - public void testEnableCaching() { + @ParameterizedTest(name = "Testing with: {1}, {0}, (nodeFile {3})") + @MethodSource("data") + public void testEnableCaching(String pSchedulerType, + String pTraceType, String pTraceLocation, String pNodeFile) { + initTestSLSRunner(pSchedulerType, pTraceType, pTraceLocation, pNodeFile); String networkCacheDefault = Security.getProperty( SLSRunner.NETWORK_CACHE_TTL); String networkNegativeCacheDefault = diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSStreamAMSynth.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSStreamAMSynth.java index a5d30e02d85..f12c960de98 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSStreamAMSynth.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSLSStreamAMSynth.java @@ -22,11 +22,9 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.Timeout; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import java.util.Arrays; import java.util.Collection; @@ -34,11 +32,9 @@ /** * This test performs simple runs of the SLS with the generic syn json format. 
*/ -@RunWith(value = Parameterized.class) @NotThreadSafe public class TestSLSStreamAMSynth extends BaseSLSRunnerTest { - @Parameters(name = "Testing with: {1}, {0}, (nodeFile {3})") public static Collection<Object[]> data() { String capScheduler = CapacityScheduler.class.getCanonicalName(); @@ -47,28 +43,40 @@ public static Collection<Object[]> data() { String nodeFile = "src/test/resources/nodes.json"; // Test with both schedulers - return Arrays.asList(new Object[][] { + return Arrays.asList(new Object[][]{ - // covering the no nodeFile case - {capScheduler, "SYNTH", synthTraceFile, null }, + // covering the no nodeFile case + {capScheduler, "SYNTH", synthTraceFile, null}, - // covering new commandline and CapacityScheduler - {capScheduler, "SYNTH", synthTraceFile, nodeFile }, + // covering new commandline and CapacityScheduler + {capScheduler, "SYNTH", synthTraceFile, nodeFile}, - // covering FairScheduler - {fairScheduler, "SYNTH", synthTraceFile, nodeFile }, + // covering FairScheduler + {fairScheduler, "SYNTH", synthTraceFile, nodeFile}, }); } - @Before public void setup() { ongoingInvariantFile = "src/test/resources/ongoing-invariants.txt"; exitInvariantFile = "src/test/resources/exit-invariants.txt"; } - @Test(timeout = 90000) + public void initTestSLSStreamAMSynth(String pSchedulerType, + String pTraceType, String pTraceLocation, String pNodeFile) { + this.schedulerType = pSchedulerType; + this.traceType = pTraceType; + this.traceLocation = pTraceLocation; + this.nodeFile = pNodeFile; + setup(); + } + + @ParameterizedTest(name = "Testing with: {1}, {0}, (nodeFile {3})") + @MethodSource("data") + @Timeout(value = 90) @SuppressWarnings("all") - public void testSimulatorRunning() throws Exception { + public void testSimulatorRunning(String pSchedulerType, + String pTraceType, String pTraceLocation, String pNodeFile) throws Exception { + initTestSLSStreamAMSynth(pSchedulerType, pTraceType, pTraceLocation, pNodeFile); Configuration conf = new Configuration(false); long timeTillShutdownInsec = 20L; runSLS(conf, timeTillShutdownInsec); diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java index dd12a10f946..23d494be9d9 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/TestSynthJobGeneration.java @@ -27,8 +27,8 @@ import com.fasterxml.jackson.core.JsonFactoryBuilder; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -36,9 +36,9 @@ import java.util.Arrays; import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Simple test class driving the {@code SynthTraceJobProducer}, and validating @@ -88,12 +88,12 @@ public void testWorkloadGenerateTime() } } - Assert.assertTrue(bucket0 > 0); + Assertions.assertTrue(bucket0 > 
0); assertEquals(0, bucket1); - Assert.assertTrue(bucket2 > 0); - Assert.assertTrue(bucket3 > 0); - Assert.assertTrue(bucket2 > bucket0); - Assert.assertTrue(bucket2 > bucket3); + Assertions.assertTrue(bucket2 > 0); + Assertions.assertTrue(bucket3 > 0); + Assertions.assertTrue(bucket2 > bucket0); + Assertions.assertTrue(bucket2 > bucket3); LOG.info("bucket0 {}, bucket1 {}, bucket2 {}, bucket3 {}", bucket0, bucket1, bucket2, bucket3); @@ -123,7 +123,7 @@ public void testMapReduce() throws IllegalArgumentException, IOException { jobCount++; } - Assert.assertEquals(stjp.getNumJobs(), jobCount); + Assertions.assertEquals(stjp.getNumJobs(), jobCount); } @Test @@ -148,7 +148,7 @@ public void testGeneric() throws IllegalArgumentException, IOException { jobCount++; } - Assert.assertEquals(stjp.getNumJobs(), jobCount); + Assertions.assertEquals(stjp.getNumJobs(), jobCount); } @Test @@ -173,7 +173,7 @@ public void testStream() throws IllegalArgumentException, IOException { jobCount++; } - Assert.assertEquals(stjp.getNumJobs(), jobCount); + Assertions.assertEquals(stjp.getNumJobs(), jobCount); } @Test @@ -192,28 +192,28 @@ public void testSample() throws IOException { mapper.readValue(valJson, SynthTraceJobProducer.Sample.class); valSample.init(rand); int val = valSample.getInt(); - Assert.assertEquals(5, val); + Assertions.assertEquals(5, val); String distJson = "{\"val\" : 5, \"std\" : 1 }"; SynthTraceJobProducer.Sample distSample = mapper.readValue(distJson, SynthTraceJobProducer.Sample.class); distSample.init(rand); double dist = distSample.getDouble(); - Assert.assertTrue(dist > 2 && dist < 8); + Assertions.assertTrue(dist > 2 && dist < 8); String normdistJson = "{\"val\" : 5, \"std\" : 1, \"dist\": \"NORM\" }"; SynthTraceJobProducer.Sample normdistSample = mapper.readValue(normdistJson, SynthTraceJobProducer.Sample.class); normdistSample.init(rand); double normdist = normdistSample.getDouble(); - Assert.assertTrue(normdist > 2 && normdist < 8); + Assertions.assertTrue(normdist > 2 && normdist < 8); String discreteJson = "{\"discrete\" : [2, 4, 6, 8]}"; SynthTraceJobProducer.Sample discreteSample = mapper.readValue(discreteJson, SynthTraceJobProducer.Sample.class); discreteSample.init(rand); int discrete = discreteSample.getInt(); - Assert.assertTrue( + Assertions.assertTrue( Arrays.asList(new Integer[] {2, 4, 6, 8}).contains(discrete)); String discreteWeightsJson = @@ -222,24 +222,24 @@ public void testSample() throws IOException { .readValue(discreteWeightsJson, SynthTraceJobProducer.Sample.class); discreteWeightsSample.init(rand); int discreteWeights = discreteWeightsSample.getInt(); - Assert.assertEquals(8, discreteWeights); + Assertions.assertEquals(8, discreteWeights); String invalidJson = "{\"val\" : 5, \"discrete\" : [2, 4, 6, 8], " + "\"weights\": [0, 0, 0, 1]}"; try { mapper.readValue(invalidJson, SynthTraceJobProducer.Sample.class); - Assert.fail(); + Assertions.fail(); } catch (JsonMappingException e) { - Assert.assertTrue(e.getMessage().startsWith("Instantiation of")); + Assertions.assertTrue(e.getMessage().startsWith("Instantiation of")); } String invalidDistJson = "{\"val\" : 5, \"std\" : 1, " + "\"dist\": \"INVALID\" }"; try { mapper.readValue(invalidDistJson, SynthTraceJobProducer.Sample.class); - Assert.fail(); + Assertions.fail(); } catch (JsonMappingException e) { - Assert.assertTrue(e.getMessage().startsWith("Cannot construct instance of")); + Assertions.assertTrue(e.getMessage().startsWith("Cannot construct instance of")); } } diff --git 
a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java index f5db1684c71..ffd518d43de 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/appmaster/TestAMSimulator.java @@ -41,12 +41,10 @@ import org.apache.hadoop.yarn.sls.nodemanager.NMSimulator; import org.apache.hadoop.yarn.sls.scheduler.*; import org.apache.hadoop.yarn.util.resource.Resources; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import org.mockito.Mockito; import java.io.IOException; @@ -63,7 +61,6 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -@RunWith(Parameterized.class) public class TestAMSimulator { private ResourceManager rm; private YarnConfiguration conf; @@ -72,7 +69,6 @@ public class TestAMSimulator { private Class<?> slsScheduler; private Class<?> scheduler; - @Parameterized.Parameters public static Collection<Object[]> params() { return Arrays.asList(new Object[][] { {SLSFairScheduler.class, FairScheduler.class}, @@ -80,12 +76,12 @@ public static Collection<Object[]> params() { }); } - public TestAMSimulator(Class<?> slsScheduler, Class<?> scheduler) { - this.slsScheduler = slsScheduler; - this.scheduler = scheduler; + public void initTestAMSimulator(Class<?> pSlsScheduler, Class<?> pScheduler) { + this.slsScheduler = pSlsScheduler; + this.scheduler = pScheduler; + setup(); } - @Before public void setup() { createMetricOutputDir(); @@ -129,8 +125,8 @@ private void verifySchedulerMetrics(String appId) { FairSchedulerMetrics.Metric.values()) { String key = "variable.app." + appId + "." 
+ metric.getValue() + ".memory"; - Assert.assertTrue(metricRegistry.getGauges().containsKey(key)); - Assert.assertNotNull(metricRegistry.getGauges().get(key).getValue()); + Assertions.assertTrue(metricRegistry.getGauges().containsKey(key)); + Assertions.assertNotNull(metricRegistry.getGauges().get(key).getValue()); } } } @@ -141,7 +137,7 @@ private void createMetricOutputDir() { try { metricOutputDir = Files.createTempDirectory(testDir, "output"); } catch (IOException e) { - Assert.fail(e.toString()); + Assertions.fail(e.toString()); } } @@ -149,12 +145,14 @@ private void deleteMetricOutputDir() { try { FileUtils.deleteDirectory(metricOutputDir.toFile()); } catch (IOException e) { - Assert.fail(e.toString()); + Assertions.fail(e.toString()); } } - @Test - public void testAMSimulator() throws Exception { + @ParameterizedTest + @MethodSource("params") + public void testAMSimulator(Class<?> pSlsScheduler, Class<?> pScheduler) throws Exception { + initTestAMSimulator(pSlsScheduler, pScheduler); // Register one app MockAMSimulator app = new MockAMSimulator(); String appId = "app1"; @@ -179,15 +177,18 @@ public void testAMSimulator() throws Exception { verifySchedulerMetrics(appId); - Assert.assertEquals(1, rm.getRMContext().getRMApps().size()); - Assert.assertNotNull(rm.getRMContext().getRMApps().get(app.appId)); + Assertions.assertEquals(1, rm.getRMContext().getRMApps().size()); + Assertions.assertNotNull(rm.getRMContext().getRMApps().get(app.appId)); // Finish this app app.lastStep(); } - @Test - public void testAMSimulatorWithNodeLabels() throws Exception { + @ParameterizedTest + @MethodSource("params") + public void testAMSimulatorWithNodeLabels(Class<?> pSlsScheduler, Class<?> pScheduler) + throws Exception { + initTestAMSimulator(pSlsScheduler, pScheduler); if (scheduler.equals(CapacityScheduler.class)) { // add label to the cluster RMAdminCLI rmAdminCLI = new RMAdminCLI(conf); @@ -220,15 +221,18 @@ public void testAMSimulatorWithNodeLabels() throws Exception { ConcurrentMap<ApplicationId, RMApp> rmApps = rm.getRMContext().getRMApps(); - Assert.assertEquals(1, rmApps.size()); + Assertions.assertEquals(1, rmApps.size()); RMApp rmApp = rmApps.get(app.appId); - Assert.assertNotNull(rmApp); - Assert.assertEquals("label1", rmApp.getAmNodeLabelExpression()); + Assertions.assertNotNull(rmApp); + Assertions.assertEquals("label1", rmApp.getAmNodeLabelExpression()); } } - @Test - public void testPackageRequests() throws YarnException { + @ParameterizedTest + @MethodSource("params") + public void testPackageRequests(Class<?> pSlsScheduler, Class<?> pScheduler) + throws YarnException { + initTestAMSimulator(pSlsScheduler, pScheduler); MockAMSimulator app = new MockAMSimulator(); List<ContainerSimulator> containerSimulators = new ArrayList<>(); Resource resource = Resources.createResource(1024); @@ -269,7 +273,7 @@ public void testPackageRequests() throws YarnException { // are for same rack. // All resource requests for nodes would be packaged into 2 as there are // two different nodes. 
- Assert.assertEquals(4, res.size()); + Assertions.assertEquals(4, res.size()); int anyRequestCount = 0; int rackRequestCount = 0; int nodeRequestCount = 0; @@ -285,9 +289,9 @@ public void testPackageRequests() throws YarnException { } } - Assert.assertEquals(1, anyRequestCount); - Assert.assertEquals(1, rackRequestCount); - Assert.assertEquals(2, nodeRequestCount); + Assertions.assertEquals(1, anyRequestCount); + Assertions.assertEquals(1, rackRequestCount); + Assertions.assertEquals(2, nodeRequestCount); containerSimulators.clear(); s1 = ContainerSimulator.createFromTaskContainerDefinition( @@ -313,7 +317,7 @@ public void testPackageRequests() throws YarnException { // are for same rack but for two different allocation id. // All resource requests for nodes would be packaged into 3 as either node // or allocation id is different for each request. - Assert.assertEquals(7, res.size()); + Assertions.assertEquals(7, res.size()); anyRequestCount = 0; rackRequestCount = 0; @@ -323,7 +327,7 @@ public void testPackageRequests() throws YarnException { String resourceName = request.getResourceName(); long allocationId = request.getAllocationRequestId(); // allocation id should be either 1 or 2 - Assert.assertTrue(allocationId == 1 || allocationId == 2); + Assertions.assertTrue(allocationId == 1 || allocationId == 2); if (resourceName.equals("*")) { anyRequestCount++; } else if (resourceName.equals("/default-rack")) { @@ -333,13 +337,16 @@ public void testPackageRequests() throws YarnException { } } - Assert.assertEquals(2, anyRequestCount); - Assert.assertEquals(2, rackRequestCount); - Assert.assertEquals(3, nodeRequestCount); + Assertions.assertEquals(2, anyRequestCount); + Assertions.assertEquals(2, rackRequestCount); + Assertions.assertEquals(3, nodeRequestCount); } - @Test - public void testAMSimulatorRanNodesCleared() throws Exception { + @ParameterizedTest + @MethodSource("params") + public void testAMSimulatorRanNodesCleared(Class<?> pSlsScheduler, + Class<?> pScheduler) throws Exception { + initTestAMSimulator(pSlsScheduler, pScheduler); NMSimulator nm = new NMSimulator(); nm.init("/rack1/testNode1", Resources.createResource(1024 * 10, 10), 0, 1000, rm, -1f); @@ -354,11 +361,11 @@ public void testAMSimulatorRanNodesCleared() throws Exception { when(slsRunner.getNmMap()).thenReturn(nmMap); app.getRanNodes().add(nm.getNode().getNodeID()); nm.getNode().getRunningApps().add(app.appId); - Assert.assertTrue(nm.getNode().getRunningApps().contains(app.appId)); + Assertions.assertTrue(nm.getNode().getRunningApps().contains(app.appId)); app.lastStep(); - Assert.assertFalse(nm.getNode().getRunningApps().contains(app.appId)); - Assert.assertTrue(nm.getNode().getRunningApps().isEmpty()); + Assertions.assertFalse(nm.getNode().getRunningApps().contains(app.appId)); + Assertions.assertTrue(nm.getNode().getRunningApps().isEmpty()); } private TaskContainerDefinition createDefaultTaskContainerDefMock( Resource resource, int priority, ExecutionType execType, String type, @@ -375,7 +382,7 @@ private TaskContainerDefinition createDefaultTaskContainerDefMock( return taskContainerDef; } - @After + @AfterEach public void tearDown() { if (rm != null) { rm.stop(); diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/nodemanager/TestNMSimulator.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/nodemanager/TestNMSimulator.java index 6f7ff179246..306856998b5 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/nodemanager/TestNMSimulator.java +++ 
b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/nodemanager/TestNMSimulator.java @@ -32,17 +32,14 @@ import org.apache.hadoop.yarn.sls.scheduler.SLSCapacityScheduler; import org.apache.hadoop.yarn.sls.scheduler.SLSFairScheduler; import org.apache.hadoop.yarn.util.resource.Resources; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import java.util.Arrays; import java.util.Collection; -@RunWith(Parameterized.class) public class TestNMSimulator { private final int GB = 1024; private ResourceManager rm; @@ -51,7 +48,6 @@ public class TestNMSimulator { private Class slsScheduler; private Class scheduler; - @Parameterized.Parameters public static Collection<Object[]> params() { return Arrays.asList(new Object[][] { {SLSFairScheduler.class, FairScheduler.class}, @@ -59,12 +55,12 @@ public static Collection<Object[]> params() { }); } - public TestNMSimulator(Class slsScheduler, Class scheduler) { - this.slsScheduler = slsScheduler; - this.scheduler = scheduler; + public void initTestNMSimulator(Class pSlsScheduler, Class pScheduler) { + this.slsScheduler = pSlsScheduler; + this.scheduler = pScheduler; + setup(); } - @Before public void setup() { conf = new YarnConfiguration(); conf.set(YarnConfiguration.RM_SCHEDULER, slsScheduler.getName()); @@ -75,8 +71,10 @@ public void setup() { rm.start(); } - @Test - public void testNMSimulator() throws Exception { + @ParameterizedTest + @MethodSource("params") + public void testNMSimulator(Class<?> pSlsScheduler, Class<?> pScheduler) throws Exception { + initTestNMSimulator(pSlsScheduler, pScheduler); // Register one node NMSimulator node1 = new NMSimulator(); node1.init("/rack1/node1", Resources.createResource(GB * 10, 10), 0, 1000, @@ -100,10 +98,10 @@ public void testNMSimulator() throws Exception { } }, 500, 10000); - Assert.assertEquals(1, rm.getResourceScheduler().getNumClusterNodes()); - Assert.assertEquals(GB * 10, + Assertions.assertEquals(1, rm.getResourceScheduler().getNumClusterNodes()); + Assertions.assertEquals(GB * 10, rm.getResourceScheduler().getRootQueueMetrics().getAvailableMB()); - Assert.assertEquals(10, + Assertions.assertEquals(10, rm.getResourceScheduler().getRootQueueMetrics() .getAvailableVirtualCores()); @@ -112,24 +110,24 @@ public void testNMSimulator() throws Exception { Container container1 = Container.newInstance(cId1, null, null, Resources.createResource(GB, 1), null, null); node1.addNewContainer(container1, 100000l, null); - Assert.assertTrue("Node1 should have one running container.", - node1.getRunningContainers().containsKey(cId1)); + Assertions.assertTrue( + node1.getRunningContainers().containsKey(cId1), "Node1 should have one running container."); // Allocate one AM container on node1 ContainerId cId2 = newContainerId(2, 1, 1); Container container2 = Container.newInstance(cId2, null, null, Resources.createResource(GB, 1), null, null); node1.addNewContainer(container2, -1l, null); - Assert.assertTrue("Node1 should have one running AM container", - node1.getAMContainers().contains(cId2)); + Assertions.assertTrue( + node1.getAMContainers().contains(cId2), "Node1 should have one running AM container"); // Remove containers node1.cleanupContainer(cId1); - Assert.assertTrue("Container1 should 
be removed from Node1.", - node1.getCompletedContainers().contains(cId1)); + Assertions.assertTrue( + node1.getCompletedContainers().contains(cId1), "Container1 should be removed from Node1."); node1.cleanupContainer(cId2); - Assert.assertFalse("Container2 should be removed from Node1.", - node1.getAMContainers().contains(cId2)); + Assertions.assertFalse( + node1.getAMContainers().contains(cId2), "Container2 should be removed from Node1."); } private ContainerId newContainerId(int appId, int appAttemptId, int cId) { @@ -139,8 +137,11 @@ private ContainerId newContainerId(int appId, int appAttemptId, int cId) { appAttemptId), cId); } - @Test - public void testNMSimAppAddedAndRemoved() throws Exception { + @ParameterizedTest + @MethodSource("params") + public void testNMSimAppAddedAndRemoved(Class<?> pSlsScheduler, Class<?> pScheduler) + throws Exception { + initTestNMSimulator(pSlsScheduler, pScheduler); // Register one node NMSimulator node = new NMSimulator(); node.init("/rack1/node1", Resources.createResource(GB * 10, 10), 0, 1000, @@ -162,8 +163,8 @@ public void testNMSimAppAddedAndRemoved() throws Exception { .getAvailableMB() > 0, 500, 10000); - Assert.assertEquals("Node should have no runningApps.", - node.getNode().getRunningApps().size(), 0); + Assertions.assertEquals( + node.getNode().getRunningApps().size(), 0, "Node should have no runningApps."); // Allocate one app container on node ApplicationId appId = BuilderUtils.newApplicationId(1, 1); @@ -173,21 +174,24 @@ public void testNMSimAppAddedAndRemoved() throws Exception { Container container = Container.newInstance(cId, null, null, Resources.createResource(GB, 1), null, null); node.addNewContainer(container, 100000l, appId); - Assert.assertTrue("Node should have app: " - + appId + " in runningApps list.", - node.getNode().getRunningApps().contains(appId)); + Assertions.assertTrue( + node.getNode().getRunningApps().contains(appId), "Node should have app: " + + appId + " in runningApps list."); // Finish the app on the node. 
node.finishApplication(appId); - Assert.assertFalse("Node should not have app: " - + appId + " in runningApps list.", - node.getNode().getRunningApps().contains(appId)); - Assert.assertEquals("Node should have no runningApps.", - node.getNode().getRunningApps().size(), 0); + Assertions.assertFalse( + node.getNode().getRunningApps().contains(appId), "Node should not have app: " + + appId + " in runningApps list."); + Assertions.assertEquals( + node.getNode().getRunningApps().size(), 0, "Node should have no runningApps."); } - @Test - public void testNMSimNullAppAddedAndRemoved() throws Exception { + @ParameterizedTest + @MethodSource("params") + public void testNMSimNullAppAddedAndRemoved(Class<?> pSlsScheduler, Class<?> pScheduler) + throws Exception { + initTestNMSimulator(pSlsScheduler, pScheduler); // Register one node NMSimulator node = new NMSimulator(); node.init("/rack1/node1", Resources.createResource(GB * 10, 10), 0, 1000, @@ -209,25 +213,25 @@ public void testNMSimNullAppAddedAndRemoved() throws Exception { .getAvailableMB() > 0, 500, 10000); - Assert.assertEquals("Node should have no runningApps.", - node.getNode().getRunningApps().size(), 0); + Assertions.assertEquals( + node.getNode().getRunningApps().size(), 0, "Node should have no runningApps."); // Allocate null app container on node ContainerId cId = newContainerId(1, 1, 1); Container container = Container.newInstance(cId, null, null, Resources.createResource(GB, 1), null, null); node.addNewContainer(container, 100000l, null); - Assert.assertEquals("Node should have no runningApps if appId is null.", - node.getNode().getRunningApps().size(), 0); + Assertions.assertEquals( + node.getNode().getRunningApps().size(), 0, "Node should have no runningApps if appId is null."); // Finish non-existent app on the node. 
ApplicationId appId = BuilderUtils.newApplicationId(1, 1); node.finishApplication(appId); - Assert.assertEquals("Node should have no runningApps.", - node.getNode().getRunningApps().size(), 0); + Assertions.assertEquals( + node.getNode().getRunningApps().size(), 0, "Node should have no runningApps."); } - @After + @AfterEach public void tearDown() throws Exception { rm.stop(); } diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/scheduler/TestTaskRunner.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/scheduler/TestTaskRunner.java index ce6c1b30b65..143e52b8fdd 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/scheduler/TestTaskRunner.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/scheduler/TestTaskRunner.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.yarn.sls.scheduler; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -28,13 +28,13 @@ public class TestTaskRunner { private TaskRunner runner; - @Before + @BeforeEach public void setUp() { runner = new TaskRunner(); runner.setQueueSize(5); } - @After + @AfterEach public void cleanUp() throws InterruptedException { runner.stop(); } @@ -50,7 +50,7 @@ public SingleTask(long startTime) { @Override public void firstStep() { if (first) { - Assert.fail(); + Assertions.fail(); } first = true; latch.countDown(); @@ -58,12 +58,12 @@ public void firstStep() { @Override public void middleStep() { - Assert.fail(); + Assertions.fail(); } @Override public void lastStep() { - Assert.fail(); + Assertions.fail(); } } @@ -72,7 +72,7 @@ public void testSingleTask() throws Exception { runner.start(); runner.schedule(new SingleTask(0)); SingleTask.latch.await(5000, TimeUnit.MILLISECONDS); - Assert.assertTrue(SingleTask.first); + Assertions.assertTrue(SingleTask.first); } public static class DualTask extends TaskRunner.Task { @@ -87,20 +87,20 @@ public DualTask(long startTime, long endTime, long interval) { @Override public void firstStep() { if (first) { - Assert.fail(); + Assertions.fail(); } first = true; } @Override public void middleStep() { - Assert.fail(); + Assertions.fail(); } @Override public void lastStep() { if (last) { - Assert.fail(); + Assertions.fail(); } last = true; latch.countDown(); @@ -112,8 +112,8 @@ public void testDualTask() throws Exception { runner.start(); runner.schedule(new DualTask(0, 10, 10)); DualTask.latch.await(5000, TimeUnit.MILLISECONDS); - Assert.assertTrue(DualTask.first); - Assert.assertTrue(DualTask.last); + Assertions.assertTrue(DualTask.first); + Assertions.assertTrue(DualTask.last); } public static class TriTask extends TaskRunner.Task { @@ -129,7 +129,7 @@ public TriTask(long startTime, long endTime, long interval) { @Override public void firstStep() { if (first) { - Assert.fail(); + Assertions.fail(); } first = true; } @@ -137,7 +137,7 @@ public void firstStep() { @Override public void middleStep() { if (middle) { - Assert.fail(); + Assertions.fail(); } middle = true; } @@ -145,7 +145,7 @@ public void middleStep() { @Override public void lastStep() { if (last) { - Assert.fail(); + Assertions.fail(); } last = true; latch.countDown(); @@ -157,9 +157,9 @@ public void testTriTask() throws Exception { runner.start(); 
runner.schedule(new TriTask(0, 10, 5)); TriTask.latch.await(5000, TimeUnit.MILLISECONDS); - Assert.assertTrue(TriTask.first); - Assert.assertTrue(TriTask.middle); - Assert.assertTrue(TriTask.last); + Assertions.assertTrue(TriTask.first); + Assertions.assertTrue(TriTask.middle); + Assertions.assertTrue(TriTask.last); } public static class MultiTask extends TaskRunner.Task { @@ -175,7 +175,7 @@ public MultiTask(long startTime, long endTime, long interval) { @Override public void firstStep() { if (first) { - Assert.fail(); + Assertions.fail(); } first = true; } @@ -188,7 +188,7 @@ public void middleStep() { @Override public void lastStep() { if (last) { - Assert.fail(); + Assertions.fail(); } last = true; latch.countDown(); @@ -200,9 +200,9 @@ public void testMultiTask() throws Exception { runner.start(); runner.schedule(new MultiTask(0, 20, 5)); MultiTask.latch.await(5000, TimeUnit.MILLISECONDS); - Assert.assertTrue(MultiTask.first); - Assert.assertEquals((20 - 0) / 5 - 2 + 1, MultiTask.middle); - Assert.assertTrue(MultiTask.last); + Assertions.assertTrue(MultiTask.first); + Assertions.assertEquals((20 - 0) / 5 - 2 + 1, MultiTask.middle); + Assertions.assertTrue(MultiTask.last); } @@ -217,7 +217,7 @@ public PreStartTask(long startTime) { @Override public void firstStep() { if (first) { - Assert.fail(); + Assertions.fail(); } first = true; latch.countDown(); @@ -240,8 +240,8 @@ public void testPreStartQueueing() throws Exception { long startedAt = System.currentTimeMillis(); PreStartTask.latch.await(5000, TimeUnit.MILLISECONDS); long runAt = System.currentTimeMillis(); - Assert.assertTrue(PreStartTask.first); - Assert.assertTrue(runAt - startedAt >= 200); + Assertions.assertTrue(PreStartTask.first); + Assertions.assertTrue(runAt - startedAt >= 200); } } diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/utils/TestSLSUtils.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/utils/TestSLSUtils.java index 5376a55cc63..f5e3e89b192 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/utils/TestSLSUtils.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/utils/TestSLSUtils.java @@ -21,8 +21,8 @@ import org.apache.hadoop.yarn.api.records.NodeLabel; import org.apache.hadoop.yarn.sls.SLSRunner.NodeDetails; import org.apache.hadoop.yarn.util.resource.Resources; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import java.io.File; import java.nio.file.Files; @@ -37,13 +37,13 @@ public class TestSLSUtils { public void testGetRackHostname() { String str = "/rack1/node1"; String[] rackHostname = SLSUtils.getRackHostName(str); - Assert.assertEquals("rack1", rackHostname[0]); - Assert.assertEquals("node1", rackHostname[1]); + Assertions.assertEquals("rack1", rackHostname[0]); + Assertions.assertEquals("node1", rackHostname[1]); str = "/rackA/rackB/node1"; rackHostname = SLSUtils.getRackHostName(str); - Assert.assertEquals("rackA/rackB", rackHostname[0]); - Assert.assertEquals("node1", rackHostname[1]); + Assertions.assertEquals("rackA/rackB", rackHostname[0]); + Assertions.assertEquals("node1", rackHostname[1]); } @Test @@ -51,45 +51,45 @@ public void testParseNodesFromNodeFile() throws Exception { String nodeFile = "src/test/resources/nodes.json"; Set<NodeDetails> nodeDetails = SLSUtils.parseNodesFromNodeFile( nodeFile, Resources.createResource(1024, 2)); - Assert.assertEquals(20, nodeDetails.size()); + Assertions.assertEquals(20, 
nodeDetails.size()); nodeFile = "src/test/resources/nodes-with-resources.json"; nodeDetails = SLSUtils.parseNodesFromNodeFile( nodeFile, Resources.createResource(1024, 2)); - Assert.assertEquals(4, nodeDetails.size()); + Assertions.assertEquals(4, nodeDetails.size()); for (NodeDetails nodeDetail : nodeDetails) { if (nodeDetail.getHostname().equals("/rack1/node1")) { - Assert.assertEquals(2048, + Assertions.assertEquals(2048, nodeDetail.getNodeResource().getMemorySize()); - Assert.assertEquals(6, + Assertions.assertEquals(6, nodeDetail.getNodeResource().getVirtualCores()); } else if (nodeDetail.getHostname().equals("/rack1/node2")) { - Assert.assertEquals(1024, + Assertions.assertEquals(1024, nodeDetail.getNodeResource().getMemorySize()); - Assert.assertEquals(2, + Assertions.assertEquals(2, nodeDetail.getNodeResource().getVirtualCores()); - Assert.assertNull(nodeDetail.getLabels()); + Assertions.assertNull(nodeDetail.getLabels()); } else if (nodeDetail.getHostname().equals("/rack1/node3")) { - Assert.assertEquals(1024, + Assertions.assertEquals(1024, nodeDetail.getNodeResource().getMemorySize()); - Assert.assertEquals(2, + Assertions.assertEquals(2, nodeDetail.getNodeResource().getVirtualCores()); - Assert.assertEquals(2, nodeDetail.getLabels().size()); + Assertions.assertEquals(2, nodeDetail.getLabels().size()); for (NodeLabel nodeLabel : nodeDetail.getLabels()) { if (nodeLabel.getName().equals("label1")) { - Assert.assertTrue(nodeLabel.isExclusive()); + Assertions.assertTrue(nodeLabel.isExclusive()); } else if(nodeLabel.getName().equals("label2")) { - Assert.assertFalse(nodeLabel.isExclusive()); + Assertions.assertFalse(nodeLabel.isExclusive()); } else { - Assert.fail("Unexpected label"); + Assertions.fail("Unexpected label"); } } } else if (nodeDetail.getHostname().equals("/rack1/node4")) { - Assert.assertEquals(6144, + Assertions.assertEquals(6144, nodeDetail.getNodeResource().getMemorySize()); - Assert.assertEquals(12, + Assertions.assertEquals(12, nodeDetail.getNodeResource().getVirtualCores()); - Assert.assertEquals(2, nodeDetail.getLabels().size()); + Assertions.assertEquals(2, nodeDetail.getLabels().size()); } } } @@ -97,20 +97,20 @@ public void testParseNodesFromNodeFile() throws Exception { @Test public void testGenerateNodes() { Set<NodeDetails> nodes = SLSUtils.generateNodes(3, 3); - Assert.assertEquals("Number of nodes is wrong.", 3, nodes.size()); - Assert.assertEquals("Number of racks is wrong.", 3, getNumRack(nodes)); + Assertions.assertEquals(3, nodes.size(), "Number of nodes is wrong."); + Assertions.assertEquals(3, getNumRack(nodes), "Number of racks is wrong."); nodes = SLSUtils.generateNodes(3, 1); - Assert.assertEquals("Number of nodes is wrong.", 3, nodes.size()); - Assert.assertEquals("Number of racks is wrong.", 1, getNumRack(nodes)); + Assertions.assertEquals(3, nodes.size(), "Number of nodes is wrong."); + Assertions.assertEquals(1, getNumRack(nodes), "Number of racks is wrong."); nodes = SLSUtils.generateNodes(3, 4); - Assert.assertEquals("Number of nodes is wrong.", 3, nodes.size()); - Assert.assertEquals("Number of racks is wrong.", 3, getNumRack(nodes)); + Assertions.assertEquals(3, nodes.size(), "Number of nodes is wrong."); + Assertions.assertEquals(3, getNumRack(nodes), "Number of racks is wrong."); nodes = SLSUtils.generateNodes(3, 0); - Assert.assertEquals("Number of nodes is wrong.", 3, nodes.size()); - Assert.assertEquals("Number of racks is wrong.", 1, getNumRack(nodes)); + Assertions.assertEquals(3, nodes.size(), "Number of nodes is wrong."); + 
Assertions.assertEquals(1, getNumRack(nodes), "Number of racks is wrong."); } /** @@ -126,10 +126,10 @@ public void testGenerateNodeTableMapping() throws Exception { SLSUtils.generateNodeTableMapping(nodes, fileName); List<String> lines = Files.readAllLines(Paths.get(fileName)); - Assert.assertEquals(3, lines.size()); + Assertions.assertEquals(3, lines.size()); for (String line : lines) { - Assert.assertTrue(line.contains("node")); - Assert.assertTrue(line.contains("/rack")); + Assertions.assertTrue(line.contains("node")); + Assertions.assertTrue(line.contains("/rack")); } } diff --git a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/web/TestSLSWebApp.java b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/web/TestSLSWebApp.java index c3f66a0b74d..2213c606e86 100644 --- a/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/web/TestSLSWebApp.java +++ b/hadoop-tools/hadoop-sls/src/test/java/org/apache/hadoop/yarn/sls/web/TestSLSWebApp.java @@ -18,9 +18,9 @@ package org.apache.hadoop.yarn.sls.web; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; import org.apache.commons.io.FileUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.File; import java.nio.charset.StandardCharsets; @@ -60,13 +60,13 @@ public void testSimulateInfoPageHtmlTemplate() throws Exception { String simulateInfo = MessageFormat.format(simulateInfoTemplate, info.toString()); - Assert.assertTrue("The simulate info html page should not be empty", - simulateInfo.length() > 0); + Assertions.assertTrue( + simulateInfo.length() > 0, "The simulate info html page should not be empty"); for (Map.Entry<String, Object> entry : simulateInfoMap.entrySet()) { - Assert.assertTrue("The simulate info html page should have information " - + "of " + entry.getKey(), simulateInfo.contains("<td class='td1'>" + Assertions.assertTrue(simulateInfo.contains("<td class='td1'>" + entry.getKey() + "</td><td class='td2'>" - + entry.getValue() + "</td>")); + + entry.getValue() + "</td>"), "The simulate info html page should have information " + + "of " + entry.getKey()); } } @@ -90,8 +90,8 @@ public void testSimulatePageHtmlTemplate() throws Exception { } String simulateInfo = MessageFormat.format(simulateTemplate, queueInfo, "s", 1000, 1000); - Assert.assertTrue("The simulate page html page should not be empty", - simulateInfo.length() > 0); + Assertions.assertTrue( + simulateInfo.length() > 0, "The simulate page html page should not be empty"); } @Test @@ -116,7 +116,7 @@ public void testTrackPageHtmlTemplate() throws Exception { } String trackInfo = MessageFormat.format(trackTemplate, trackedQueueInfo, trackedAppInfo, "s", 1000, 1000); - Assert.assertTrue("The queue/app tracking html page should not be empty", - trackInfo.length() > 0); + Assertions.assertTrue( + trackInfo.length() > 0, "The queue/app tracking html page should not be empty"); } } --------------------------------------------------------------------- To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
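The recurring change in this patch converts JUnit 4 parameterized runners to JUnit 5 @ParameterizedTest methods. Below is a minimal sketch of that conversion pattern; the class, field, and parameter names are illustrative only, while the annotations and the init-method idiom mirror what the patch does.

// Minimal sketch of the JUnit 4 -> JUnit 5 conversion applied throughout this patch.
// ExampleParameterizedTest and its members are hypothetical names, not from the patch.
import java.util.Arrays;
import java.util.Collection;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

public class ExampleParameterizedTest {

  private String schedulerType;

  // JUnit 4 exposed this via @Parameterized.Parameters; JUnit 5 keeps the static
  // data method and feeds each Object[] row into the test method's parameters.
  public static Collection<Object[]> data() {
    return Arrays.asList(new Object[][] {
        {"capacity"},
        {"fair"},
    });
  }

  // Replaces the JUnit 4 constructor/@Parameter fields: copies the injected
  // arguments into instance fields so the existing test body stays unchanged.
  public void initExampleParameterizedTest(String pSchedulerType) {
    this.schedulerType = pSchedulerType;
  }

  // @RunWith(Parameterized.class) + @Test(timeout = 90000) becomes
  // @ParameterizedTest + @MethodSource + @Timeout (value is in seconds by default).
  @ParameterizedTest(name = "Testing with: {0}")
  @MethodSource("data")
  @Timeout(value = 90)
  public void testSomething(String pSchedulerType) {
    initExampleParameterizedTest(pSchedulerType);
    // JUnit 5 moves the assertion message from the first to the last argument.
    Assertions.assertTrue(schedulerType.length() > 0, "scheduler type should be set");
  }
}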