This is an automated email from the ASF dual-hosted git repository.

zhangduo pushed a commit to branch branch-2.5
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2.5 by this push:
     new e81a6c790f2 HBASE-29819 Upgrade hbase-asyncfs to use junit5 (#7615)
e81a6c790f2 is described below

commit e81a6c790f219167b685edf691e832d153ba343f
Author: Duo Zhang <[email protected]>
AuthorDate: Mon Jan 12 17:37:41 2026 +0800

    HBASE-29819 Upgrade hbase-asyncfs to use junit5 (#7615)
    
    Signed-off-by: Istvan Toth <[email protected]>
    Reviewed-by: Liu Xiao <[email protected]>
    (cherry picked from commit 119c0ced73e3a3cd8e9fc067dfb710ec2bb098e2)
---
 hbase-asyncfs/pom.xml                              |  5 --
 .../io/asyncfs/TestExcludeDatanodeManager.java     | 18 ++--
 .../asyncfs/TestFanOutOneBlockAsyncDFSOutput.java  | 98 +++++++++-------------
 .../TestFanOutOneBlockAsyncDFSOutputHang.java      | 30 +++----
 .../hadoop/hbase/io/asyncfs/TestLeaseRenewal.java  | 25 +++---
 .../hbase/io/asyncfs/TestLocalAsyncOutput.java     | 17 ++--
 .../TestOverwriteFileUnderConstruction.java        | 39 ++++-----
 .../TestSaslFanOutOneBlockAsyncDFSOutput.java      | 77 ++++++++---------
 .../hbase/io/asyncfs/TestSendBufSizePredictor.java | 15 ++--
 .../hadoop/hbase/util/TestRecoverLeaseFSUtils.java | 54 ++++++------
 .../hbase/HBaseParameterizedInvocationContext.java | 55 ++++++++++++
 .../hbase/HBaseParameterizedParameterResolver.java | 50 +++++++++++
 .../hbase/HBaseParameterizedTemplateProvider.java  | 92 ++++++++++++++++++++
 .../hbase/HBaseParameterizedTestTemplate.java      | 38 +++++++++
 14 files changed, 391 insertions(+), 222 deletions(-)

diff --git a/hbase-asyncfs/pom.xml b/hbase-asyncfs/pom.xml
index 16b50ef4035..0d99473d035 100644
--- a/hbase-asyncfs/pom.xml
+++ b/hbase-asyncfs/pom.xml
@@ -83,11 +83,6 @@
       <artifactId>junit-jupiter-params</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.junit.vintage</groupId>
-      <artifactId>junit-vintage-engine</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.bouncycastle</groupId>
       <artifactId>bcprov-jdk18on</artifactId>
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestExcludeDatanodeManager.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestExcludeDatanodeManager.java
index f7ca1639ec6..2a6f517c837 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestExcludeDatanodeManager.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestExcludeDatanodeManager.java
@@ -17,27 +17,23 @@
  */
 package org.apache.hadoop.hbase.io.asyncfs;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.io.asyncfs.monitor.ExcludeDatanodeManager;
 import org.apache.hadoop.hbase.io.asyncfs.monitor.StreamSlowMonitor;
+import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 
-@Category({ SmallTests.class })
+@Tag(MiscTests.TAG)
+@Tag(SmallTests.TAG)
 public class TestExcludeDatanodeManager {
 
-  @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE =
-    HBaseClassTestRule.forClass(TestExcludeDatanodeManager.class);
-
   @Test
   public void testExcludeSlowDNBySpeed() {
     Configuration conf = HBaseConfiguration.create();
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutput.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutput.java
index f0910684edd..63be7bc9cf5 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutput.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutput.java
@@ -21,9 +21,9 @@ import static org.apache.hadoop.hbase.util.FutureUtils.consume;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY;
 import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -38,7 +38,6 @@ import java.util.concurrent.ExecutionException;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.io.asyncfs.monitor.ExcludeDatanodeManager;
 import org.apache.hadoop.hbase.io.asyncfs.monitor.StreamSlowMonitor;
@@ -49,15 +48,12 @@ import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster.DataNodeProperties;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.ipc.RemoteException;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.FixMethodOrder;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
-import org.junit.runners.MethodSorters;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -67,14 +63,10 @@ import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;
 import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;
 import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioSocketChannel;
 
-@FixMethodOrder(MethodSorters.NAME_ASCENDING)
-@Category({ MiscTests.class, MediumTests.class })
+@Tag(MiscTests.TAG)
+@Tag(MediumTests.TAG)
 public class TestFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
 
-  @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE =
-    HBaseClassTestRule.forClass(TestFanOutOneBlockAsyncDFSOutput.class);
-
   private static final Logger LOG = LoggerFactory.getLogger(TestFanOutOneBlockAsyncDFSOutput.class);
   private static DistributedFileSystem FS;
   private static EventLoopGroup EVENT_LOOP_GROUP;
@@ -83,10 +75,9 @@ public class TestFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
 
   private static StreamSlowMonitor MONITOR;
 
-  @Rule
-  public TestName name = new TestName();
+  private Path file;
 
-  @BeforeClass
+  @BeforeAll
   public static void setUp() throws Exception {
     UTIL.getConfiguration().setInt(DFS_CLIENT_SOCKET_TIMEOUT_KEY, READ_TIMEOUT_MS);
     startMiniDFSCluster(3);
@@ -96,7 +87,7 @@ public class TestFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
     MONITOR = StreamSlowMonitor.create(UTIL.getConfiguration(), "testMonitor");
   }
 
-  @AfterClass
+  @AfterAll
   public static void tearDown() throws Exception {
     if (EVENT_LOOP_GROUP != null) {
       EVENT_LOOP_GROUP.shutdownGracefully().get();
@@ -136,13 +127,17 @@ public class TestFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
     }
   }
 
+  @BeforeEach
+  public void setUpBeforeEach(TestInfo testInfo) {
+    file = new Path("/" + testInfo.getTestMethod().get().getName());
+  }
+
   @Test
   public void test() throws IOException, InterruptedException, ExecutionException {
-    Path f = new Path("/" + name.getMethodName());
     EventLoop eventLoop = EVENT_LOOP_GROUP.next();
-    FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true,
-      false, (short) 3, FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR, true);
-    writeAndVerify(FS, f, out);
+    FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, file,
+      true, false, (short) 3, FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR, true);
+    writeAndVerify(FS, file, out);
   }
 
   /**
@@ -151,10 +146,9 @@ public class TestFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
    */
   @Test
   public void test0Recover() throws IOException, InterruptedException, ExecutionException {
-    Path f = new Path("/" + name.getMethodName());
     EventLoop eventLoop = EVENT_LOOP_GROUP.next();
-    FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true,
-      false, (short) 3, FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR, true);
+    FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, file,
+      true, false, (short) 3, FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR, true);
     byte[] b = new byte[10];
     Bytes.random(b);
     out.write(b, 0, b.length);
@@ -162,17 +156,12 @@ public class TestFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
     // restart one datanode which causes one connection broken
     CLUSTER.restartDataNode(0);
     out.write(b, 0, b.length);
-    try {
-      out.flush(false).get();
-      fail("flush should fail");
-    } catch (ExecutionException e) {
-      // we restarted one datanode so the flush should fail
-      LOG.info("expected exception caught", e);
-    }
+    ExecutionException e = assertThrows(ExecutionException.class, () -> out.flush(false).get());
+    LOG.info("expected exception caught", e);
     out.recoverAndClose(null);
-    assertEquals(b.length, FS.getFileStatus(f).getLen());
+    assertEquals(b.length, FS.getFileStatus(file).getLen());
     byte[] actual = new byte[b.length];
-    try (FSDataInputStream in = FS.open(f)) {
+    try (FSDataInputStream in = FS.open(file)) {
       in.readFully(actual);
     }
     assertArrayEquals(b, actual);
@@ -180,13 +169,12 @@ public class TestFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
 
   @Test
   public void testHeartbeat() throws IOException, InterruptedException, ExecutionException {
-    Path f = new Path("/" + name.getMethodName());
     EventLoop eventLoop = EVENT_LOOP_GROUP.next();
-    FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true,
-      false, (short) 3, FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR, true);
+    FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, file,
+      true, false, (short) 3, FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR, true);
     Thread.sleep(READ_TIMEOUT_MS * 2);
     // the connection to datanode should still alive.
-    writeAndVerify(FS, f, out);
+    writeAndVerify(FS, file, out);
   }
 
   /**
@@ -194,16 +182,13 @@ public class TestFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
    */
   @Test
   public void testCreateParentFailed() throws IOException {
-    Path f = new Path("/" + name.getMethodName() + "/test");
+    Path f = new Path(file, "test");
     EventLoop eventLoop = EVENT_LOOP_GROUP.next();
-    try {
-      FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true, false, (short) 3,
-        FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR, true);
-      fail("should fail with parent does not exist");
-    } catch (RemoteException e) {
-      LOG.info("expected exception caught", e);
-      assertThat(e.unwrapRemoteException(), instanceOf(FileNotFoundException.class));
-    }
+    RemoteException e = assertThrows(RemoteException.class,
+      () -> FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true, false, (short) 3,
+        FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR, true));
+    LOG.info("expected exception caught", e);
+    assertThat(e.unwrapRemoteException(), instanceOf(FileNotFoundException.class));
   }
 
   @Test
@@ -264,19 +249,18 @@ public class TestFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
 
   @Test
   public void testWriteLargeChunk() throws IOException, InterruptedException, ExecutionException {
-    Path f = new Path("/" + name.getMethodName());
     EventLoop eventLoop = EVENT_LOOP_GROUP.next();
-    FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true,
-      false, (short) 3, 1024 * 1024 * 1024, eventLoop, CHANNEL_CLASS, MONITOR, true);
+    FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, file,
+      true, false, (short) 3, 1024 * 1024 * 1024, eventLoop, CHANNEL_CLASS, MONITOR, true);
     byte[] b = new byte[50 * 1024 * 1024];
     Bytes.random(b);
     out.write(b);
     consume(out.flush(false));
     assertEquals(b.length, out.flush(false).get().longValue());
     out.close();
-    assertEquals(b.length, FS.getFileStatus(f).getLen());
+    assertEquals(b.length, FS.getFileStatus(file).getLen());
     byte[] actual = new byte[b.length];
-    try (FSDataInputStream in = FS.open(f)) {
+    try (FSDataInputStream in = FS.open(file)) {
       in.readFully(actual);
     }
     assertArrayEquals(b, actual);
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutputHang.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutputHang.java
index 7f6535a93a9..4f70aaab421 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutputHang.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestFanOutOneBlockAsyncDFSOutputHang.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hbase.io.asyncfs;
 
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.util.ArrayList;
 import java.util.Iterator;
@@ -28,7 +28,6 @@ import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.CyclicBarrier;
 import java.util.concurrent.ExecutionException;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.io.asyncfs.monitor.StreamSlowMonitor;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -37,13 +36,10 @@ import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster.DataNodeProperties;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -65,13 +61,10 @@ import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioSocketChannel;
  * in this test class we use the default value for timeout which is 60 seconds and it is enough for
  * this test.
  */
-@Category({ MiscTests.class, MediumTests.class })
+@Tag(MiscTests.TAG)
+@Tag(MediumTests.TAG)
 public class TestFanOutOneBlockAsyncDFSOutputHang extends AsyncFSTestBase {
 
-  @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE =
-    HBaseClassTestRule.forClass(TestFanOutOneBlockAsyncDFSOutputHang.class);
-
   private static final Logger LOG =
     LoggerFactory.getLogger(TestFanOutOneBlockAsyncDFSOutputHang.class);
 
@@ -85,10 +78,7 @@ public class TestFanOutOneBlockAsyncDFSOutputHang extends AsyncFSTestBase {
 
   private static FanOutOneBlockAsyncDFSOutput OUT;
 
-  @Rule
-  public TestName name = new TestName();
-
-  @BeforeClass
+  @BeforeAll
   public static void setUp() throws Exception {
     startMiniDFSCluster(2);
     FS = CLUSTER.getFileSystem();
@@ -101,7 +91,7 @@ public class TestFanOutOneBlockAsyncDFSOutputHang extends AsyncFSTestBase {
       FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR, true);
   }
 
-  @AfterClass
+  @AfterAll
   public static void tearDown() throws Exception {
     if (OUT != null) {
       OUT.recoverAndClose(null);
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestLeaseRenewal.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestLeaseRenewal.java
index e8f7188518d..02c15e78591 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestLeaseRenewal.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestLeaseRenewal.java
@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.hbase.io.asyncfs;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.mockito.Mockito.mockConstruction;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.verifyNoMoreInteractions;
@@ -27,7 +27,6 @@ import java.io.IOException;
 import java.lang.reflect.Method;
 import java.util.Optional;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.io.asyncfs.monitor.StreamSlowMonitor;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
@@ -35,11 +34,10 @@ import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSOutputStream;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.DummyDFSOutputStream;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 import org.mockito.MockedConstruction;
 
 import org.apache.hbase.thirdparty.io.netty.channel.Channel;
@@ -54,19 +52,16 @@ import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioSocketChannel;
  * <p>
  * See HBASE-28955 for more details.
  */
-@Category({ MiscTests.class, MediumTests.class })
+@Tag(MiscTests.TAG)
+@Tag(MediumTests.TAG)
 public class TestLeaseRenewal extends AsyncFSTestBase {
 
-  @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE =
-    HBaseClassTestRule.forClass(TestLeaseRenewal.class);
-
   private static DistributedFileSystem FS;
   private static EventLoopGroup EVENT_LOOP_GROUP;
   private static Class<? extends Channel> CHANNEL_CLASS;
   private static StreamSlowMonitor MONITOR;
 
-  @BeforeClass
+  @BeforeAll
   public static void setUp() throws Exception {
     startMiniDFSCluster(3);
     FS = CLUSTER.getFileSystem();
@@ -75,7 +70,7 @@ public class TestLeaseRenewal extends AsyncFSTestBase {
     MONITOR = StreamSlowMonitor.create(UTIL.getConfiguration(), "testMonitor");
   }
 
-  @AfterClass
+  @AfterAll
   public static void tearDown() throws Exception {
     if (EVENT_LOOP_GROUP != null) {
       EVENT_LOOP_GROUP.shutdownGracefully().get();
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestLocalAsyncOutput.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestLocalAsyncOutput.java
index ec87316e5f1..b6535cdcfd1 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestLocalAsyncOutput.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestLocalAsyncOutput.java
@@ -21,29 +21,24 @@ import java.io.IOException;
 import java.util.concurrent.ExecutionException;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.hadoop.hbase.io.asyncfs.monitor.StreamSlowMonitor;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
-import org.junit.AfterClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 
 import org.apache.hbase.thirdparty.io.netty.channel.Channel;
 import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;
 import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;
 import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioSocketChannel;
 
-@Category({ MiscTests.class, SmallTests.class })
+@Tag(MiscTests.TAG)
+@Tag(SmallTests.TAG)
 public class TestLocalAsyncOutput {
 
-  @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE =
-    HBaseClassTestRule.forClass(TestLocalAsyncOutput.class);
-
   private static EventLoopGroup GROUP = new NioEventLoopGroup();
 
   private static Class<? extends Channel> CHANNEL_CLASS = NioSocketChannel.class;
@@ -52,7 +47,7 @@ public class TestLocalAsyncOutput {
 
   private static StreamSlowMonitor MONITOR;
 
-  @AfterClass
+  @AfterAll
   public static void tearDownAfterClass() throws Exception {
     TEST_UTIL.cleanupTestDir();
     GROUP.shutdownGracefully().get();
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestOverwriteFileUnderConstruction.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestOverwriteFileUnderConstruction.java
index 7a3a6de10f0..a5bb0de9b33 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestOverwriteFileUnderConstruction.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestOverwriteFileUnderConstruction.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.io.asyncfs;
 
 import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -28,49 +28,40 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
 import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
 import org.apache.hadoop.ipc.RemoteException;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
 
 /**
  * Used to confirm that it is OK to overwrite a file which is being written currently.
  */
-@Category({ MiscTests.class, MediumTests.class })
+@Tag(MiscTests.TAG)
+@Tag(MediumTests.TAG)
 public class TestOverwriteFileUnderConstruction extends AsyncFSTestBase {
 
-  @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE =
-    HBaseClassTestRule.forClass(TestOverwriteFileUnderConstruction.class);
-
   private static FileSystem FS;
 
-  @Rule
-  public final TestName name = new TestName();
-
-  @BeforeClass
+  @BeforeAll
   public static void setUp() throws Exception {
     startMiniDFSCluster(3);
     FS = CLUSTER.getFileSystem();
   }
 
-  @AfterClass
+  @AfterAll
   public static void tearDown() throws Exception {
     shutdownMiniDFSCluster();
   }
 
   @Test
-  public void testNotOverwrite() throws IOException {
-    Path file = new Path("/" + name.getMethodName());
+  public void testNotOverwrite(TestInfo testInfo) throws IOException {
+    Path file = new Path("/" + testInfo.getDisplayName());
     try (FSDataOutputStream out1 = FS.create(file)) {
       try {
         FS.create(file, false);
@@ -83,8 +74,8 @@ public class TestOverwriteFileUnderConstruction extends AsyncFSTestBase {
   }
 
   @Test
-  public void testOverwrite() throws IOException {
-    Path file = new Path("/" + name.getMethodName());
+  public void testOverwrite(TestInfo testInfo) throws IOException {
+    Path file = new Path("/" + testInfo.getDisplayName());
     FSDataOutputStream out1 = FS.create(file);
     FSDataOutputStream out2 = FS.create(file, true);
     out1.write(2);
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java
index e178d0960ea..a0e56546f43 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSaslFanOutOneBlockAsyncDFSOutput.java
@@ -32,6 +32,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 import java.util.concurrent.ExecutionException;
+import java.util.stream.Stream;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -39,7 +40,7 @@ import org.apache.hadoop.crypto.CipherSuite;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProviderFactory;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
 import org.apache.hadoop.hbase.io.asyncfs.monitor.StreamSlowMonitor;
 import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
 import org.apache.hadoop.hbase.security.SecurityConstants;
@@ -48,19 +49,14 @@ import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestInfo;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.params.provider.Arguments;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -70,17 +66,14 @@ import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;
 import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;
 import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioSocketChannel;
 
-@RunWith(Parameterized.class)
-@Category({ MiscTests.class, LargeTests.class })
+@Tag(MiscTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "[{index}] protection = {0}, encryption = {1}, cipher = {2}")
 public class TestSaslFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
 
   private static final Logger LOG =
     LoggerFactory.getLogger(TestSaslFanOutOneBlockAsyncDFSOutput.class);
 
-  @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE =
-    HBaseClassTestRule.forClass(TestSaslFanOutOneBlockAsyncDFSOutput.class);
-
   private static DistributedFileSystem FS;
 
   private static EventLoopGroup EVENT_LOOP_GROUP;
@@ -105,29 +98,27 @@ public class TestSaslFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
 
   private static StreamSlowMonitor MONITOR;
 
-  @Rule
-  public TestName name = new TestName();
-
-  @Parameter(0)
-  public String protection;
+  private String protection;
+  private String encryptionAlgorithm;
+  private String cipherSuite;
 
-  @Parameter(1)
-  public String encryptionAlgorithm;
-
-  @Parameter(2)
-  public String cipherSuite;
+  public TestSaslFanOutOneBlockAsyncDFSOutput(String protection, String encryptionAlgorithm,
+    String cipherSuite) {
+    this.protection = protection;
+    this.encryptionAlgorithm = encryptionAlgorithm;
+    this.cipherSuite = cipherSuite;
+  }
 
-  @Parameters(name = "{index}: protection={0}, encryption={1}, cipherSuite={2}")
-  public static Iterable<Object[]> data() {
-    List<Object[]> params = new ArrayList<>();
+  public static Stream<Arguments> parameters() {
+    List<Arguments> params = new ArrayList<>();
     for (String protection : Arrays.asList("authentication", "integrity", "privacy")) {
       for (String encryptionAlgorithm : Arrays.asList("", "3des", "rc4")) {
         for (String cipherSuite : Arrays.asList("", CipherSuite.AES_CTR_NOPADDING.getName())) {
-          params.add(new Object[] { protection, encryptionAlgorithm, cipherSuite });
+          params.add(Arguments.of(protection, encryptionAlgorithm, cipherSuite));
         }
       }
     }
-    return params;
+    return params.stream();
   }
 
   private static void setUpKeyProvider(Configuration conf) throws Exception {
@@ -175,7 +166,7 @@ public class TestSaslFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
     return kdc;
   }
 
-  @BeforeClass
+  @BeforeAll
   public static void setUpBeforeClass() throws Exception {
     EVENT_LOOP_GROUP = new NioEventLoopGroup();
     CHANNEL_CLASS = NioSocketChannel.class;
@@ -193,7 +184,7 @@ public class TestSaslFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
     MONITOR = StreamSlowMonitor.create(UTIL.getConfiguration(), "testMonitor");
   }
 
-  @AfterClass
+  @AfterAll
   public static void tearDownAfterClass() throws Exception {
     if (EVENT_LOOP_GROUP != null) {
       EVENT_LOOP_GROUP.shutdownGracefully().get();
@@ -214,8 +205,8 @@ public class TestSaslFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
     method.invoke(FS, entryptionTestDirOnTestFs, TEST_KEY_NAME);
   }
 
-  @Before
-  public void setUp() throws Exception {
+  @BeforeEach
+  public void setUp(TestInfo testInfo) throws Exception {
     UTIL.getConfiguration().set("dfs.data.transfer.protection", protection);
     if (StringUtils.isBlank(encryptionAlgorithm) && StringUtils.isBlank(cipherSuite)) {
       UTIL.getConfiguration().setBoolean(DFS_ENCRYPT_DATA_TRANSFER_KEY, false);
@@ -235,14 +226,14 @@ public class TestSaslFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
 
     startMiniDFSCluster(3);
     FS = CLUSTER.getFileSystem();
-    testDirOnTestFs = new Path("/" + name.getMethodName().replaceAll("[^0-9a-zA-Z]", "_"));
+    testDirOnTestFs = new Path("/" + testInfo.getDisplayName().replaceAll("[^0-9a-zA-Z]", "_"));
     FS.mkdirs(testDirOnTestFs);
     entryptionTestDirOnTestFs = new Path("/" + testDirOnTestFs.getName() + "_enc");
     FS.mkdirs(entryptionTestDirOnTestFs);
     createEncryptionZone();
   }
 
-  @After
+  @AfterEach
   public void tearDown() throws IOException {
     shutdownMiniDFSCluster();
   }
@@ -262,8 +253,8 @@ public class TestSaslFanOutOneBlockAsyncDFSOutput extends AsyncFSTestBase {
     TestFanOutOneBlockAsyncDFSOutput.writeAndVerify(FS, file, out);
   }
 
-  @Test
-  public void test() throws IOException, InterruptedException, ExecutionException {
+  @TestTemplate
+  public void test() throws Exception {
     test(getTestFile());
     test(getEncryptionTestFile());
   }
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSendBufSizePredictor.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSendBufSizePredictor.java
index 07fc3afbf2f..1ddf6d64f96 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSendBufSizePredictor.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/io/asyncfs/TestSendBufSizePredictor.java
@@ -17,22 +17,17 @@
  */
 package org.apache.hadoop.hbase.io.asyncfs;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 
-import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 
-@Category({ MiscTests.class, SmallTests.class })
+@Tag(MiscTests.TAG)
+@Tag(SmallTests.TAG)
 public class TestSendBufSizePredictor {
 
-  @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE =
-    HBaseClassTestRule.forClass(TestSendBufSizePredictor.class);
-
   @Test
   public void test() {
     SendBufSizePredictor p = new SendBufSizePredictor();
diff --git a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/util/TestRecoverLeaseFSUtils.java b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/util/TestRecoverLeaseFSUtils.java
index 953d66b3fa4..cad73b58b14 100644
--- a/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/util/TestRecoverLeaseFSUtils.java
+++ b/hbase-asyncfs/src/test/java/org/apache/hadoop/hbase/util/TestRecoverLeaseFSUtils.java
@@ -17,40 +17,42 @@
  */
 package org.apache.hadoop.hbase.util;
 
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 import java.io.IOException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.mockito.Mockito;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
 
 /**
  * Test our recoverLease loop against mocked up filesystem.
  */
-@Category({ MiscTests.class, MediumTests.class })
+@Tag(MiscTests.TAG)
+@Tag(MediumTests.TAG)
 public class TestRecoverLeaseFSUtils {
 
-  @ClassRule
-  public static final HBaseClassTestRule CLASS_RULE =
-    HBaseClassTestRule.forClass(TestRecoverLeaseFSUtils.class);
-
   private static final HBaseCommonTestingUtility HTU = new HBaseCommonTestingUtility();
-  static {
+
+  private static Path FILE;
+
+  @BeforeAll
+  public static void setUp() {
     Configuration conf = HTU.getConfiguration();
     conf.setInt("hbase.lease.recovery.first.pause", 10);
     conf.setInt("hbase.lease.recovery.pause", 10);
+    FILE = new Path(HTU.getDataTestDir(), "file.txt");
   }
 
-  private static Path FILE = new Path(HTU.getDataTestDir(), "file.txt");
-
   /**
    * Test recover lease eventually succeeding.
    */
@@ -58,14 +60,14 @@ public class TestRecoverLeaseFSUtils {
   public void testRecoverLease() throws IOException {
     long startTime = EnvironmentEdgeManager.currentTime();
     HTU.getConfiguration().setInt("hbase.lease.recovery.dfs.timeout", 1000);
-    CancelableProgressable reporter = Mockito.mock(CancelableProgressable.class);
-    Mockito.when(reporter.progress()).thenReturn(true);
-    DistributedFileSystem dfs = Mockito.mock(DistributedFileSystem.class);
+    CancelableProgressable reporter = mock(CancelableProgressable.class);
+    when(reporter.progress()).thenReturn(true);
+    DistributedFileSystem dfs = mock(DistributedFileSystem.class);
     // Fail four times and pass on the fifth.
-    Mockito.when(dfs.recoverLease(FILE)).thenReturn(false).thenReturn(false).thenReturn(false)
+    when(dfs.recoverLease(FILE)).thenReturn(false).thenReturn(false).thenReturn(false)
       .thenReturn(false).thenReturn(true);
     RecoverLeaseFSUtils.recoverFileLease(dfs, FILE, HTU.getConfiguration(), reporter);
-    Mockito.verify(dfs, Mockito.times(5)).recoverLease(FILE);
+    verify(dfs, times(5)).recoverLease(FILE);
     // Make sure we waited at least hbase.lease.recovery.dfs.timeout * 3 (the first two
     // invocations will happen pretty fast... the we fall into the longer wait loop).
     assertTrue((EnvironmentEdgeManager.currentTime() - startTime)
@@ -79,17 +81,17 @@ public class TestRecoverLeaseFSUtils {
   public void testIsFileClosed() throws IOException {
     // Make this time long so it is plain we broke out because of the isFileClosed invocation.
     HTU.getConfiguration().setInt("hbase.lease.recovery.dfs.timeout", 100000);
-    CancelableProgressable reporter = Mockito.mock(CancelableProgressable.class);
-    Mockito.when(reporter.progress()).thenReturn(true);
-    IsFileClosedDistributedFileSystem dfs = Mockito.mock(IsFileClosedDistributedFileSystem.class);
+    CancelableProgressable reporter = mock(CancelableProgressable.class);
+    when(reporter.progress()).thenReturn(true);
+    IsFileClosedDistributedFileSystem dfs = mock(IsFileClosedDistributedFileSystem.class);
     // Now make it so we fail the first two times -- the two fast invocations, then we fall into
     // the long loop during which we will call isFileClosed.... the next invocation should
     // therefore return true if we are to break the loop.
-    Mockito.when(dfs.recoverLease(FILE)).thenReturn(false).thenReturn(false).thenReturn(true);
-    Mockito.when(dfs.isFileClosed(FILE)).thenReturn(true);
+    when(dfs.recoverLease(FILE)).thenReturn(false).thenReturn(false).thenReturn(true);
+    when(dfs.isFileClosed(FILE)).thenReturn(true);
     RecoverLeaseFSUtils.recoverFileLease(dfs, FILE, HTU.getConfiguration(), reporter);
-    Mockito.verify(dfs, Mockito.times(2)).recoverLease(FILE);
-    Mockito.verify(dfs, Mockito.times(1)).isFileClosed(FILE);
+    verify(dfs, times(2)).recoverLease(FILE);
+    verify(dfs, times(1)).isFileClosed(FILE);
   }
 
   /**
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedInvocationContext.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedInvocationContext.java
new file mode 100644
index 00000000000..e13c7e58c80
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedInvocationContext.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.util.Collections;
+import java.util.List;
+import org.junit.jupiter.api.extension.Extension;
+import org.junit.jupiter.api.extension.TestTemplateInvocationContext;
+import org.junit.jupiter.params.provider.Arguments;
+
+/**
+ * @see HBaseParameterizedTestTemplate
+ */
+public class HBaseParameterizedInvocationContext implements TestTemplateInvocationContext {
+
+  private final Arguments arguments;
+
+  private final String namePattern;
+
+  HBaseParameterizedInvocationContext(Arguments arguments, String namePattern) {
+    this.arguments = arguments;
+    this.namePattern = namePattern;
+  }
+
+  @Override
+  public String getDisplayName(int invocationIndex) {
+    String result = namePattern.replace("{index}", String.valueOf(invocationIndex));
+
+    Object[] args = arguments.get();
+    for (int i = 0; i < args.length; i++) {
+      result = result.replace("{" + i + "}", String.valueOf(args[i]));
+    }
+    return result;
+  }
+
+  @Override
+  public List<Extension> getAdditionalExtensions() {
+    return Collections.singletonList(new HBaseParameterizedParameterResolver(arguments));
+  }
+}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java
new file mode 100644
index 00000000000..de17960d151
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedParameterResolver.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.api.extension.ParameterContext;
+import org.junit.jupiter.api.extension.ParameterResolutionException;
+import org.junit.jupiter.api.extension.ParameterResolver;
+import org.junit.jupiter.params.provider.Arguments;
+
+/**
+ * @see HBaseParameterizedTestTemplate
+ */
+public class HBaseParameterizedParameterResolver implements ParameterResolver {
+
+  private final Object[] values;
+
+  HBaseParameterizedParameterResolver(Arguments arguments) {
+    this.values = arguments.get();
+  }
+
+  @Override
+  public boolean supportsParameter(ParameterContext pc, ExtensionContext ec)
+    throws ParameterResolutionException {
+    int index = pc.getIndex();
+    return index < values.length
+      && pc.getParameter().getType().isAssignableFrom(values[index].getClass());
+  }
+
+  @Override
+  public Object resolveParameter(ParameterContext pc, ExtensionContext ec)
+    throws ParameterResolutionException {
+    return values[pc.getIndex()];
+  }
+}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedTemplateProvider.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedTemplateProvider.java
new file mode 100644
index 00000000000..90ec0d0d1a6
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedTemplateProvider.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.util.stream.Stream;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.junit.jupiter.api.extension.ExtensionConfigurationException;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.api.extension.TestTemplateInvocationContext;
+import org.junit.jupiter.api.extension.TestTemplateInvocationContextProvider;
+import org.junit.jupiter.params.provider.Arguments;
+
+/**
+ * The entry point class for supporting JUnit4-style Parameterized tests, where we can use the
+ * constructor to pass parameters.
+ * <p>
+ * JUnit5's {@link org.junit.jupiter.params.ParameterizedClass} will create separate test classes,
+ * which is different from JUnit4, and {@link org.junit.jupiter.params.ParameterizedTest} does not
+ * support passing parameters through constructors.
+ * <p>
+ * When you want to use this provider, annotate the test class with
+ * {@link HBaseParameterizedTestTemplate}, and provide a static method named "parameters" for
+ * providing the arguments. The method must have no parameters and return a Stream&lt;Arguments&gt;.
+ * All test methods should be marked with {@link org.junit.jupiter.api.TestTemplate}, not
+ * {@link org.junit.jupiter.api.Test} or {@link org.junit.jupiter.params.ParameterizedTest}.
+ * @see HBaseParameterizedTestTemplate
+ * @see HBaseParameterizedInvocationContext
+ * @see HBaseParameterizedParameterResolver
+ */
+@InterfaceAudience.Private
+public class HBaseParameterizedTemplateProvider implements TestTemplateInvocationContextProvider {
+
+  private static final String PARAMETERS_METHOD_NAME = "parameters";
+
+  @Override
+  public boolean supportsTestTemplate(ExtensionContext context) {
+    return context.getTestClass()
+      .map(c -> c.isAnnotationPresent(HBaseParameterizedTestTemplate.class)).orElse(false);
+  }
+
+  @Override
+  public Stream<TestTemplateInvocationContext>
+    provideTestTemplateInvocationContexts(ExtensionContext context) {
+    Class<?> testClass = context.getRequiredTestClass();
+    // get parameters
+    Method method;
+    try {
+      method = testClass.getDeclaredMethod(PARAMETERS_METHOD_NAME);
+    } catch (NoSuchMethodException e) {
+      throw new ExtensionConfigurationException(
+        "Test class must declare static " + PARAMETERS_METHOD_NAME + " method");
+    }
+
+    if (!Modifier.isStatic(method.getModifiers())) {
+      throw new ExtensionConfigurationException(PARAMETERS_METHOD_NAME + " method must be static");
+    }
+    if (method.getParameterCount() > 0) {
+      throw new ExtensionConfigurationException(
+        PARAMETERS_METHOD_NAME + " method must not have any parameters");
+    }
+
+    Stream<Arguments> args;
+    try {
+      args = (Stream<Arguments>) method.invoke(null);
+    } catch (IllegalAccessException | InvocationTargetException e) {
+      throw new ExtensionConfigurationException("failed to invoke parameters method", e);
+    }
+    // get display name
+    String namePattern = testClass.getAnnotation(HBaseParameterizedTestTemplate.class).name();
+
+    return args.map(arg -> new HBaseParameterizedInvocationContext(arg, namePattern));
+  }
+
+}
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedTestTemplate.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedTestTemplate.java
new file mode 100644
index 00000000000..425869f98e0
--- /dev/null
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/HBaseParameterizedTestTemplate.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.junit.jupiter.api.extension.ExtendWith;
+
+/**
+ * The annotation for running JUnit4 like parameterized tests with JUnit5.
+ * @see HBaseParameterizedTestTemplate
+ */
+@Target(ElementType.TYPE)
+@Retention(RetentionPolicy.RUNTIME)
+@ExtendWith(HBaseParameterizedTemplateProvider.class)
+@InterfaceAudience.Private
+public @interface HBaseParameterizedTestTemplate {
+
+  String name() default "[{index}]";
+}
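
For reference, a test class written against the new HBaseParameterizedTestTemplate support follows the same pattern as the migrated TestSaslFanOutOneBlockAsyncDFSOutput above. Below is a minimal sketch only; the class name, tag value, and parameter values are illustrative and not part of this commit:

    package org.apache.hadoop.hbase.io.asyncfs;

    import static org.junit.jupiter.api.Assertions.assertNotNull;

    import java.util.stream.Stream;
    import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
    import org.junit.jupiter.api.TestTemplate;
    import org.junit.jupiter.params.provider.Arguments;

    @HBaseParameterizedTestTemplate(name = "[{index}] compression = {0}")
    public class TestCompressionExample {

      private final String compression;

      // Parameters are injected through the constructor, as with JUnit4's Parameterized runner.
      public TestCompressionExample(String compression) {
        this.compression = compression;
      }

      // Must be static, take no arguments and return a Stream<Arguments>.
      public static Stream<Arguments> parameters() {
        return Stream.of(Arguments.of("none"), Arguments.of("gz"));
      }

      // Test methods are marked with @TestTemplate instead of @Test.
      @TestTemplate
      public void testCompressionIsSet() {
        assertNotNull(compression);
      }
    }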
