This is an automated email from the ASF dual-hosted git repository.

slfan1989 pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/trunk by this push:
     new 96c380d1522 HADOOP-19410. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-client-integration-tests. (#7650)
96c380d1522 is described below

commit 96c380d152200229df789c27f1086d74e055570b
Author: slfan1989 <55643692+slfan1...@users.noreply.github.com>
AuthorDate: Sat Apr 26 08:30:11 2025 +0800

    HADOOP-19410. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-client-integration-tests. (#7650)

    * HADOOP-19410. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-client-integration-tests. (#7650)

    Co-authored-by: Chris Nauroth <cnaur...@apache.org>
    Reviewed-by: Chris Nauroth <cnaur...@apache.org>
    Signed-off-by: Shilun Fan <slfan1...@apache.org>
---
 .../hadoop-client-integration-tests/pom.xml            | 20 ++++++++++++++++++++
 .../org/apache/hadoop/example/ITUseHadoopCodecs.java   | 10 +++++-----
 .../org/apache/hadoop/example/ITUseMiniCluster.java    | 15 ++++++++-------
 3 files changed, 33 insertions(+), 12 deletions(-)

diff --git a/hadoop-client-modules/hadoop-client-integration-tests/pom.xml b/hadoop-client-modules/hadoop-client-integration-tests/pom.xml
index f66b7e4b187..db4091a61f9 100644
--- a/hadoop-client-modules/hadoop-client-integration-tests/pom.xml
+++ b/hadoop-client-modules/hadoop-client-integration-tests/pom.xml
@@ -47,6 +47,26 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-launcher</artifactId>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.lz4</groupId>
       <artifactId>lz4-java</artifactId>
diff --git a/hadoop-client-modules/hadoop-client-integration-tests/src/test/java/org/apache/hadoop/example/ITUseHadoopCodecs.java b/hadoop-client-modules/hadoop-client-integration-tests/src/test/java/org/apache/hadoop/example/ITUseHadoopCodecs.java
index fd0effa143b..cc1f133fed5 100644
--- a/hadoop-client-modules/hadoop-client-integration-tests/src/test/java/org/apache/hadoop/example/ITUseHadoopCodecs.java
+++ b/hadoop-client-modules/hadoop-client-integration-tests/src/test/java/org/apache/hadoop/example/ITUseHadoopCodecs.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.example;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
 
 import java.io.*;
 import java.util.Arrays;
@@ -37,7 +37,7 @@ import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -134,8 +134,8 @@ private void codecTest(Configuration conf, int seed, int count, String codecClas
       int expected;
       do {
         expected = originalIn.read();
-        assertEquals("Inflated stream read by byte does not match",
-            expected, inflateFilter.read());
+        assertEquals(expected, inflateFilter.read(),
+            "Inflated stream read by byte does not match");
       } while (expected != -1);
     }
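For readers following the migration, the assertion rewrite above is the subtle part: JUnit 4's org.junit.Assert takes the failure message as the first argument, while JUnit 5's org.junit.jupiter.api.Assertions takes it as the last. A minimal, self-contained sketch of that argument-order change (the class name and values below are hypothetical illustrations, not part of this commit):

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.Test;

// Hypothetical sketch (not from this commit): the failure message that
// JUnit 4 accepted as the first assertEquals argument becomes the last
// argument in JUnit 5.
class AssertionOrderSketch {

  @Test
  void messageMovesToTheLastArgument() {
    int expected = 42;
    int actual = 21 * 2;
    // JUnit 4 style was: assertEquals("values differ", expected, actual);
    // JUnit 5 puts the message last:
    assertEquals(expected, actual, "values differ");
  }
}

The same reordering appears in the ITUseMiniCluster change below.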
diff --git a/hadoop-client-modules/hadoop-client-integration-tests/src/test/java/org/apache/hadoop/example/ITUseMiniCluster.java b/hadoop-client-modules/hadoop-client-integration-tests/src/test/java/org/apache/hadoop/example/ITUseMiniCluster.java
index 2e304861bab..7f0b150d736 100644
--- a/hadoop-client-modules/hadoop-client-integration-tests/src/test/java/org/apache/hadoop/example/ITUseMiniCluster.java
+++ b/hadoop-client-modules/hadoop-client-integration-tests/src/test/java/org/apache/hadoop/example/ITUseMiniCluster.java
@@ -20,13 +20,14 @@ package org.apache.hadoop.example;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import java.io.IOException;
 import java.net.URISyntaxException;
 
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -69,7 +70,7 @@ public class ITUseMiniCluster {
       + "fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident,"
       + " sunt in culpa qui officia deserunt mollit anim id est laborum.";
 
-  @Before
+  @BeforeEach
   public void clusterUp() throws IOException {
     final Configuration conf = new HdfsConfiguration();
     cluster = new MiniDFSCluster.Builder(conf)
@@ -84,7 +85,7 @@ public void clusterUp() throws IOException {
     yarnCluster.start();
   }
 
-  @After
+  @AfterEach
   public void clusterDown() {
     if (cluster != null) {
       cluster.close();
     }
@@ -111,7 +112,7 @@ public void simpleReadAfterWrite(final FileSystem fs) throws IOException {
     }
     try (FSDataInputStream in = fs.open(path)) {
      final String result = in.readUTF();
-      Assert.assertEquals("Didn't read back text we wrote.", TEXT, result);
+      assertEquals(TEXT, result, "Didn't read back text we wrote.");
     }
   }
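The lifecycle annotations in ITUseMiniCluster map one-to-one from JUnit 4 to JUnit 5: @Before becomes @BeforeEach and @After becomes @AfterEach, both still running around every test method. A minimal standalone sketch of that pattern (hypothetical fixture, not taken from this commit):

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.util.ArrayList;
import java.util.List;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

// Hypothetical sketch of the JUnit 5 lifecycle annotations used by this patch:
// @BeforeEach/@AfterEach replace JUnit 4's @Before/@After.
class LifecycleSketch {

  private List<String> scratch;

  @BeforeEach
  void setUp() {
    // Analogous to clusterUp(): build the fixture before each test.
    scratch = new ArrayList<>();
  }

  @AfterEach
  void tearDown() {
    // Analogous to clusterDown(): release the fixture after each test.
    scratch = null;
  }

  @Test
  void fixtureIsFreshPerTest() {
    scratch.add("hello");
    assertEquals(1, scratch.size(), "fixture should start empty for each test");
  }
}

As in the pom.xml hunk above, junit-jupiter-api is enough to compile such tests, while junit-jupiter-engine and junit-platform-launcher are added with test scope so the Jupiter tests can actually be discovered and executed at test time.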