This is an automated email from the ASF dual-hosted git repository.

slfan1989 pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 809b1b59738 HADOOP-19427. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-compat-bench. (#7619)
809b1b59738 is described below

commit 809b1b59738e6644851c1ab194cc3bf7aec55ab8
Author: slfan1989 <55643692+slfan1...@users.noreply.github.com>
AuthorDate: Thu Apr 24 06:15:49 2025 +0800

    HADOOP-19427. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-compat-bench. (#7619)
    
    * HADOOP-19427. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-compat-bench.
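    
    The migration follows the standard JUnit 4 to JUnit 5 mapping: org.junit.Assert
    calls become static imports from org.junit.jupiter.api.Assertions (with the
    failure message moved to the last argument), @Ignore becomes @Disabled, and
    @Before/@After become @BeforeEach/@AfterEach. The junit-vintage-engine test
    dependency is added, presumably to keep any remaining JUnit 4 style tests
    runnable on the JUnit Platform. A minimal before/after sketch of the assertion
    change (class, method, and message names below are illustrative, not taken
    from the patch):
    
        // Before (JUnit 4): the message is the first argument of assertEquals.
        import org.junit.Assert;
        import org.junit.Test;
    
        public class ExampleTest {
          @Test
          public void testNoFailedCases() {
            int failed = 0;
            Assert.assertEquals("unexpected failed cases", 0, failed);
          }
        }
    
        // After (JUnit 5): static import; the message moves to the last argument.
        import static org.junit.jupiter.api.Assertions.assertEquals;
        import org.junit.jupiter.api.Test;
    
        public class ExampleTest {
          @Test
          public void testNoFailedCases() {
            int failed = 0;
            assertEquals(0, failed, "unexpected failed cases");
          }
        }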
    
    Co-authored-by: Chris Nauroth <cnaur...@apache.org>
    Reviewed-by: Chris Nauroth <cnaur...@apache.org>
    Signed-off-by: Shilun Fan <slfan1...@apache.org>
---
 hadoop-tools/hadoop-compat-bench/pom.xml           | 25 ++++++++++++++++++++++
 .../compat/common/TestHdfsCompatDefaultSuites.java |  9 ++++----
 .../fs/compat/common/TestHdfsCompatFsCommand.java  | 16 +++++++-------
 .../common/TestHdfsCompatInterfaceCoverage.java    | 12 +++++------
 .../compat/common/TestHdfsCompatShellCommand.java  | 20 ++++++++---------
 5 files changed, 54 insertions(+), 28 deletions(-)

diff --git a/hadoop-tools/hadoop-compat-bench/pom.xml b/hadoop-tools/hadoop-compat-bench/pom.xml
index ed032b3db9b..ee93c7cf187 100644
--- a/hadoop-tools/hadoop-compat-bench/pom.xml
+++ b/hadoop-tools/hadoop-compat-bench/pom.xml
@@ -69,6 +69,31 @@
       <artifactId>mockito-inline</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-params</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-launcher</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.vintage</groupId>
+      <artifactId>junit-vintage-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>
diff --git a/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatDefaultSuites.java b/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatDefaultSuites.java
index 882d1fe8ef9..8ddd205a498 100644
--- a/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatDefaultSuites.java
+++ b/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatDefaultSuites.java
@@ -17,12 +17,13 @@
  */
 package org.apache.hadoop.fs.compat.common;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
 import org.apache.hadoop.fs.compat.HdfsCompatTool;
 import org.apache.hadoop.fs.compat.hdfs.HdfsCompatMiniCluster;
 import org.apache.hadoop.fs.compat.hdfs.HdfsCompatTestCommand;
 import org.apache.hadoop.conf.Configuration;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 public class TestHdfsCompatDefaultSuites {
   @Test
@@ -35,7 +36,7 @@ public void testSuiteAll() throws Exception {
       HdfsCompatCommand cmd = new HdfsCompatTestCommand(uri, "ALL", conf);
       cmd.initialize();
       HdfsCompatReport report = cmd.apply();
-      Assert.assertEquals(0, report.getFailedCase().size());
+      assertEquals(0, report.getFailedCase().size());
       new HdfsCompatTool(conf).printReport(report, System.out);
     } finally {
       cluster.shutdown();
@@ -52,7 +53,7 @@ public void testSuiteTpcds() throws Exception {
       HdfsCompatCommand cmd = new HdfsCompatTestCommand(uri, "TPCDS", conf);
       cmd.initialize();
       HdfsCompatReport report = cmd.apply();
-      Assert.assertEquals(0, report.getFailedCase().size());
+      assertEquals(0, report.getFailedCase().size());
       new HdfsCompatTool(conf).printReport(report, System.out);
     } finally {
       cluster.shutdown();
diff --git a/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatFsCommand.java b/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatFsCommand.java
index c2d3b0260d0..a8a6c7039e5 100644
--- a/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatFsCommand.java
+++ b/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatFsCommand.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.compat.common;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import org.apache.hadoop.fs.compat.HdfsCompatTool;
 import org.apache.hadoop.fs.compat.hdfs.HdfsCompatMiniCluster;
@@ -25,8 +26,7 @@
 import org.apache.hadoop.fs.compat.cases.HdfsCompatMkdirTestCases;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 
 import java.io.IOException;
 import java.lang.reflect.Field;
@@ -47,8 +47,8 @@ public void testDfsCompatibility() throws Exception {
       HdfsCompatCommand cmd = new TestCommand(uri, suite, conf);
       cmd.initialize();
       HdfsCompatReport report = cmd.apply();
-      Assert.assertEquals(7, report.getPassedCase().size());
-      Assert.assertEquals(0, report.getFailedCase().size());
+      assertEquals(7, report.getPassedCase().size());
+      assertEquals(0, report.getFailedCase().size());
       show(conf, report);
     } finally {
       if (cluster != null) {
@@ -65,8 +65,8 @@ public void testLocalFsCompatibility() throws Exception {
     HdfsCompatCommand cmd = new TestCommand(uri, suite, conf);
     cmd.initialize();
     HdfsCompatReport report = cmd.apply();
-    Assert.assertEquals(1, report.getPassedCase().size());
-    Assert.assertEquals(6, report.getFailedCase().size());
+    assertEquals(1, report.getPassedCase().size());
+    assertEquals(6, report.getFailedCase().size());
     show(conf, report);
     cleanup(cmd, conf);
   }
@@ -79,8 +79,8 @@ public void testFsCompatibilityWithSuite() throws Exception {
     HdfsCompatCommand cmd = new TestCommand(uri, suite, conf);
     cmd.initialize();
     HdfsCompatReport report = cmd.apply();
-    Assert.assertEquals(0, report.getPassedCase().size());
-    Assert.assertEquals(6, report.getFailedCase().size());
+    assertEquals(0, report.getPassedCase().size());
+    assertEquals(6, report.getFailedCase().size());
     show(conf, report);
     cleanup(cmd, conf);
   }
diff --git a/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatInterfaceCoverage.java b/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatInterfaceCoverage.java
index cbee71d867a..af1746a9fa0 100644
--- a/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatInterfaceCoverage.java
+++ b/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatInterfaceCoverage.java
@@ -17,12 +17,12 @@
  */
 package org.apache.hadoop.fs.compat.common;
 
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import org.apache.hadoop.fs.compat.cases.HdfsCompatBasics;
 import org.apache.hadoop.fs.FileSystem;
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Disabled;
 
 import java.lang.reflect.Method;
 import java.util.HashSet;
@@ -30,13 +30,13 @@
 
 public class TestHdfsCompatInterfaceCoverage {
   @Test
-  @Ignore
+  @Disabled
   public void testFsCompatibility() {
     Set<String> publicMethods = getPublicInterfaces(FileSystem.class);
     Set<String> targets = getTargets(HdfsCompatBasics.class);
     for (String publicMethod : publicMethods) {
-      Assert.assertTrue("Method not tested: " + publicMethod,
-          targets.contains(publicMethod));
+      assertTrue(targets.contains(publicMethod),
+          "Method not tested: " + publicMethod);
     }
   }
 
diff --git a/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatShellCommand.java b/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatShellCommand.java
index 2602a6fab12..6d4589e9132 100644
--- a/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatShellCommand.java
+++ b/hadoop-tools/hadoop-compat-bench/src/test/java/org/apache/hadoop/fs/compat/common/TestHdfsCompatShellCommand.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.compat.common;
 
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.compat.HdfsCompatTool;
@@ -24,10 +25,9 @@
 import org.apache.hadoop.fs.compat.hdfs.HdfsCompatTestCommand;
 import org.apache.hadoop.fs.compat.hdfs.HdfsCompatTestShellScope;
 import org.apache.hadoop.conf.Configuration;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 
 import java.io.File;
 import java.io.IOException;
@@ -36,13 +36,13 @@
 public class TestHdfsCompatShellCommand {
   private HdfsCompatMiniCluster cluster;
 
-  @Before
+  @BeforeEach
   public void runCluster() throws IOException {
     this.cluster = new HdfsCompatMiniCluster();
     this.cluster.start();
   }
 
-  @After
+  @AfterEach
   public void shutdownCluster() {
     this.cluster.shutdown();
     this.cluster = null;
@@ -55,8 +55,8 @@ public void testDfsCompatibility() throws Exception {
     HdfsCompatCommand cmd = new TestCommand(uri, conf);
     cmd.initialize();
     HdfsCompatReport report = cmd.apply();
-    Assert.assertEquals(3, report.getPassedCase().size());
-    Assert.assertEquals(0, report.getFailedCase().size());
+    assertEquals(3, report.getPassedCase().size());
+    assertEquals(0, report.getFailedCase().size());
     show(conf, report);
   }
 
@@ -67,8 +67,8 @@ public void testSkipCompatibility() throws Exception {
     HdfsCompatCommand cmd = new TestSkipCommand(uri, conf);
     cmd.initialize();
     HdfsCompatReport report = cmd.apply();
-    Assert.assertEquals(2, report.getPassedCase().size());
-    Assert.assertEquals(0, report.getFailedCase().size());
+    assertEquals(2, report.getPassedCase().size());
+    assertEquals(0, report.getFailedCase().size());
     show(conf, report);
   }
 

