zentol commented on code in PR #18983:
URL: https://github.com/apache/flink/pull/18983#discussion_r873652519


##########
flink-yarn/src/test/java/org/apache/flink/yarn/TaskExecutorProcessSpecContainerResourcePriorityAdapterTest.java:
##########
@@ -206,25 +197,27 @@ public void testExternalResource() {
 
         final Map<String, Long> resultExternalResources =
                 
ResourceInformationReflector.INSTANCE.getExternalResources(resource);
-        assertThat(resultExternalResources.size(), is(1));
-        assertThat(
-                
resultExternalResources.get(SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY),
-                is(SUPPORTED_EXTERNAL_RESOURCE_MAX));
+        assertThat(resultExternalResources)
+                .hasSize(1)
+                .containsEntry(
+                        SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY, 
SUPPORTED_EXTERNAL_RESOURCE_MAX);
     }
 
-    @Test(expected = IllegalStateException.class)
-    public void testExternalResourceFailExceedMax() {
-        assumeTrue(isExternalResourceSupported());
+    @Test
+    void testExternalResourceFailExceedMax() {
+
+        assumeThat(isExternalResourceSupported()).isTrue();
 
         getAdapterWithExternalResources(
                         SUPPORTED_EXTERNAL_RESOURCE_NAME, 
SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)
                 .getPriorityAndResource(
                         
TASK_EXECUTOR_PROCESS_SPEC_WITH_EXTERNAL_RESOURCE_EXCEED_MAX);
     }
 
-    @Test(expected = IllegalStateException.class)
-    public void testExternalResourceFailResourceTypeNotSupported() {
-        assumeTrue(isExternalResourceSupported());
+    @Test
+    void testExternalResourceFailResourceTypeNotSupported() {
+
+        assumeThat(isExternalResourceSupported()).isTrue();
 
         getAdapterWithExternalResources(

Review Comment:
   The migration to JUnit 5 dropped the expected-exception check from `@Test(expected = IllegalStateException.class)`. Wrap the failing call in `assertThatThrownBy(...).isInstanceOf(IllegalStateException.class)` so the test still verifies that the exception is thrown.



##########
flink-yarn/src/test/java/org/apache/flink/yarn/TaskExecutorProcessSpecContainerResourcePriorityAdapterTest.java:
##########
@@ -233,13 +226,18 @@ public void 
testExternalResourceFailResourceTypeNotSupported() {
                         
TASK_EXECUTOR_PROCESS_SPEC_WITH_UNSUPPORTED_EXTERNAL_RESOURCE);
     }
 
-    @Test(expected = IllegalStateException.class)
-    public void testExternalResourceFailHadoopVersionNotSupported() {
-        assumeFalse(isExternalResourceSupported());
-
-        getAdapterWithExternalResources(
-                        SUPPORTED_EXTERNAL_RESOURCE_NAME, 
SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)
-                
.getPriorityAndResource(TASK_EXECUTOR_PROCESS_SPEC_WITH_EXTERNAL_RESOURCE);
+    @Test
+    void testExternalResourceFailHadoopVersionNotSupported() {
+

Review Comment:
   ```suggestion
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/TaskExecutorProcessSpecContainerResourcePriorityAdapterTest.java:
##########
@@ -206,25 +197,27 @@ public void testExternalResource() {
 
         final Map<String, Long> resultExternalResources =
                 
ResourceInformationReflector.INSTANCE.getExternalResources(resource);
-        assertThat(resultExternalResources.size(), is(1));
-        assertThat(
-                
resultExternalResources.get(SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY),
-                is(SUPPORTED_EXTERNAL_RESOURCE_MAX));
+        assertThat(resultExternalResources)
+                .hasSize(1)
+                .containsEntry(
+                        SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY, 
SUPPORTED_EXTERNAL_RESOURCE_MAX);
     }
 
-    @Test(expected = IllegalStateException.class)
-    public void testExternalResourceFailExceedMax() {
-        assumeTrue(isExternalResourceSupported());
+    @Test
+    void testExternalResourceFailExceedMax() {
+
+        assumeThat(isExternalResourceSupported()).isTrue();
 
         getAdapterWithExternalResources(

Review Comment:
   Same here: the `expected = IllegalStateException.class` attribute was removed without a replacement. Use `assertThatThrownBy(...).isInstanceOf(IllegalStateException.class)` around the call that is expected to fail.



##########
flink-yarn/src/test/java/org/apache/flink/yarn/TaskExecutorProcessSpecContainerResourcePriorityAdapterTest.java:
##########
@@ -206,25 +197,27 @@ public void testExternalResource() {
 
         final Map<String, Long> resultExternalResources =
                 
ResourceInformationReflector.INSTANCE.getExternalResources(resource);
-        assertThat(resultExternalResources.size(), is(1));
-        assertThat(
-                
resultExternalResources.get(SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY),
-                is(SUPPORTED_EXTERNAL_RESOURCE_MAX));
+        assertThat(resultExternalResources)
+                .hasSize(1)
+                .containsEntry(
+                        SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY, 
SUPPORTED_EXTERNAL_RESOURCE_MAX);
     }
 
-    @Test(expected = IllegalStateException.class)
-    public void testExternalResourceFailExceedMax() {
-        assumeTrue(isExternalResourceSupported());
+    @Test
+    void testExternalResourceFailExceedMax() {
+
+        assumeThat(isExternalResourceSupported()).isTrue();
 
         getAdapterWithExternalResources(
                         SUPPORTED_EXTERNAL_RESOURCE_NAME, 
SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)
                 .getPriorityAndResource(
                         
TASK_EXECUTOR_PROCESS_SPEC_WITH_EXTERNAL_RESOURCE_EXCEED_MAX);
     }
 
-    @Test(expected = IllegalStateException.class)
-    public void testExternalResourceFailResourceTypeNotSupported() {
-        assumeTrue(isExternalResourceSupported());
+    @Test
+    void testExternalResourceFailResourceTypeNotSupported() {
+

Review Comment:
   ```suggestion
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java:
##########
@@ -24,79 +24,75 @@
 import org.apache.flink.core.fs.FileSystem;
 import org.apache.flink.core.fs.Path;
 import org.apache.flink.runtime.fs.hdfs.HadoopFileSystem;
-import org.apache.flink.testutils.junit.RetryOnFailure;
-import org.apache.flink.testutils.junit.RetryRule;
+import org.apache.flink.testutils.junit.RetryOnException;
+import org.apache.flink.testutils.junit.extensions.retry.RetryExtension;
 import org.apache.flink.testutils.s3.S3TestCredentials;
-import org.apache.flink.util.TestLogger;
 
 import org.apache.hadoop.util.VersionUtil;
-import org.junit.AfterClass;
-import org.junit.Assume;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.io.TempDir;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.file.Files;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.UUID;
 
-import static org.hamcrest.Matchers.greaterThan;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assume.assumeFalse;
-import static org.junit.Assume.assumeNoException;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assumptions.assumeThat;
+import static org.assertj.core.api.Assumptions.assumeThatThrownBy;
 
 /**
  * Tests for verifying file staging during submission to YARN works with the 
S3A file system.
  *
  * <p>Note that the setup is similar to
  * <tt>org.apache.flink.fs.s3hadoop.HadoopS3FileSystemITCase</tt>.
  */
-public class YarnFileStageTestS3ITCase extends TestLogger {
+@ExtendWith(RetryExtension.class)
+public class YarnFileStageTestS3ITCase {

Review Comment:
   ```suggestion
   class YarnFileStageTestS3ITCase {
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/TaskExecutorProcessSpecContainerResourcePriorityAdapterTest.java:
##########
@@ -206,25 +197,27 @@ public void testExternalResource() {
 
         final Map<String, Long> resultExternalResources =
                 
ResourceInformationReflector.INSTANCE.getExternalResources(resource);
-        assertThat(resultExternalResources.size(), is(1));
-        assertThat(
-                
resultExternalResources.get(SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY),
-                is(SUPPORTED_EXTERNAL_RESOURCE_MAX));
+        assertThat(resultExternalResources)
+                .hasSize(1)
+                .containsEntry(
+                        SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY, 
SUPPORTED_EXTERNAL_RESOURCE_MAX);
     }
 
-    @Test(expected = IllegalStateException.class)
-    public void testExternalResourceFailExceedMax() {
-        assumeTrue(isExternalResourceSupported());
+    @Test
+    void testExternalResourceFailExceedMax() {
+

Review Comment:
   ```suggestion
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java:
##########
@@ -173,36 +172,27 @@ private void testRecursiveUploadForYarn(String scheme, 
String pathSuffix) throws
         }
     }
 
-    @Test
-    @RetryOnFailure(times = 3)
-    public void testRecursiveUploadForYarnS3n() throws Exception {
+    @TestTemplate
+    @RetryOnException(times = 3, exception = Exception.class)
+    public void testRecursiveUploadForYarnS3n(@TempDir File tempFolder) throws 
Exception {

Review Comment:
   ```suggestion
       void testRecursiveUploadForYarnS3n(@TempDir File tempFolder) throws 
Exception {
   ```
   The same change (dropping the `public` modifier, per JUnit 5 conventions) also applies to the other test methods in this class.



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java:
##########
@@ -24,79 +24,75 @@
 import org.apache.flink.core.fs.FileSystem;
 import org.apache.flink.core.fs.Path;
 import org.apache.flink.runtime.fs.hdfs.HadoopFileSystem;
-import org.apache.flink.testutils.junit.RetryOnFailure;
-import org.apache.flink.testutils.junit.RetryRule;
+import org.apache.flink.testutils.junit.RetryOnException;
+import org.apache.flink.testutils.junit.extensions.retry.RetryExtension;
 import org.apache.flink.testutils.s3.S3TestCredentials;
-import org.apache.flink.util.TestLogger;
 
 import org.apache.hadoop.util.VersionUtil;
-import org.junit.AfterClass;
-import org.junit.Assume;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.io.TempDir;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.file.Files;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.UUID;
 
-import static org.hamcrest.Matchers.greaterThan;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assume.assumeFalse;
-import static org.junit.Assume.assumeNoException;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assumptions.assumeThat;
+import static org.assertj.core.api.Assumptions.assumeThatThrownBy;
 
 /**
  * Tests for verifying file staging during submission to YARN works with the 
S3A file system.
  *
  * <p>Note that the setup is similar to
  * <tt>org.apache.flink.fs.s3hadoop.HadoopS3FileSystemITCase</tt>.
  */
-public class YarnFileStageTestS3ITCase extends TestLogger {
+@ExtendWith(RetryExtension.class)
+public class YarnFileStageTestS3ITCase {
 
-    private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
-
-    @ClassRule public static final TemporaryFolder TEMP_FOLDER = new 
TemporaryFolder();
-
-    @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();
+    private static final Logger log = 
LoggerFactory.getLogger(YarnFileStageTestS3ITCase.class);
 
-    @Rule public final RetryRule retryRule = new RetryRule();
+    private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
 
     /** Number of tests executed. */
     private static int numRecursiveUploadTests = 0;
 
-    /** Will be updated by {@link #checkCredentialsAndSetup()} if the test is 
not skipped. */
+    /** Will be updated by {@link #checkCredentialsAndSetup(File)} if the test 
is not skipped. */
     private static boolean skipTest = true;
 
-    @BeforeClass
-    public static void checkCredentialsAndSetup() throws IOException {
+    @BeforeAll
+    static void checkCredentialsAndSetup(@TempDir File tempFolder) throws 
IOException {
         // check whether credentials exist
         S3TestCredentials.assumeCredentialsAvailable();
 
         skipTest = false;
 
-        setupCustomHadoopConfig();
+        setupCustomHadoopConfig(tempFolder);
     }
 
-    @AfterClass
-    public static void resetFileSystemConfiguration() throws IOException {
+    @AfterAll
+    public static void resetFileSystemConfiguration() {
         FileSystem.initialize(new Configuration());
     }
 
-    @AfterClass
+    @AfterAll
     public static void checkAtLeastOneTestRun() {

Review Comment:
   ```suggestion
       static void checkAtLeastOneTestRun() {
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java:
##########
@@ -173,36 +172,27 @@ private void testRecursiveUploadForYarn(String scheme, 
String pathSuffix) throws
         }
     }
 
-    @Test
-    @RetryOnFailure(times = 3)
-    public void testRecursiveUploadForYarnS3n() throws Exception {
+    @TestTemplate
+    @RetryOnException(times = 3, exception = Exception.class)
+    public void testRecursiveUploadForYarnS3n(@TempDir File tempFolder) throws 
Exception {
         // skip test on Hadoop 3: 
https://issues.apache.org/jira/browse/HADOOP-14738
-        Assume.assumeTrue(
-                "This test is skipped for Hadoop versions above 3",
-                
VersionUtil.compareVersions(System.getProperty("hadoop.version"), "3.0.0") < 0);
-
-        try {
-            Class.forName("org.apache.hadoop.fs.s3native.NativeS3FileSystem");
-        } catch (ClassNotFoundException e) {
-            // not in the classpath, cannot run this test
-            String msg = "Skipping test because NativeS3FileSystem is not in 
the class path";
-            log.info(msg);
-            assumeNoException(msg, e);
-        }
-        testRecursiveUploadForYarn("s3n", "testYarn-s3n");
+        
assumeThat(VersionUtil.compareVersions(System.getProperty("hadoop.version"), 
"3.0.0") < 0)
+                .as("This test is skipped for Hadoop versions above 3")
+                .isTrue();
+
+        assumeThatThrownBy(() -> 
Class.forName("org.apache.hadoop.fs.s3native.NativeS3FileSystem"))
+                .as("Skipping test because NativeS3FileSystem is not in the 
class path")
+                .isNull();
+        testRecursiveUploadForYarn("s3n", "testYarn-s3n", tempFolder);
     }
 
-    @Test
-    @RetryOnFailure(times = 3)
-    public void testRecursiveUploadForYarnS3a() throws Exception {
-        try {
-            Class.forName("org.apache.hadoop.fs.s3a.S3AFileSystem");
-        } catch (ClassNotFoundException e) {
-            // not in the classpath, cannot run this test
-            String msg = "Skipping test because S3AFileSystem is not in the 
class path";
-            log.info(msg);
-            assumeNoException(msg, e);
-        }
-        testRecursiveUploadForYarn("s3a", "testYarn-s3a");
+    @TestTemplate
+    @RetryOnException(times = 3, exception = Exception.class)
+    public void testRecursiveUploadForYarnS3a(@TempDir File tempFolder) throws 
Exception {
+

Review Comment:
   ```suggestion
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java:
##########
@@ -24,79 +24,75 @@
 import org.apache.flink.core.fs.FileSystem;
 import org.apache.flink.core.fs.Path;
 import org.apache.flink.runtime.fs.hdfs.HadoopFileSystem;
-import org.apache.flink.testutils.junit.RetryOnFailure;
-import org.apache.flink.testutils.junit.RetryRule;
+import org.apache.flink.testutils.junit.RetryOnException;
+import org.apache.flink.testutils.junit.extensions.retry.RetryExtension;
 import org.apache.flink.testutils.s3.S3TestCredentials;
-import org.apache.flink.util.TestLogger;
 
 import org.apache.hadoop.util.VersionUtil;
-import org.junit.AfterClass;
-import org.junit.Assume;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.io.TempDir;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.file.Files;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.UUID;
 
-import static org.hamcrest.Matchers.greaterThan;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assume.assumeFalse;
-import static org.junit.Assume.assumeNoException;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assumptions.assumeThat;
+import static org.assertj.core.api.Assumptions.assumeThatThrownBy;
 
 /**
  * Tests for verifying file staging during submission to YARN works with the 
S3A file system.
  *
  * <p>Note that the setup is similar to
  * <tt>org.apache.flink.fs.s3hadoop.HadoopS3FileSystemITCase</tt>.
  */
-public class YarnFileStageTestS3ITCase extends TestLogger {
+@ExtendWith(RetryExtension.class)
+public class YarnFileStageTestS3ITCase {
 
-    private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
-
-    @ClassRule public static final TemporaryFolder TEMP_FOLDER = new 
TemporaryFolder();
-
-    @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();
+    private static final Logger log = 
LoggerFactory.getLogger(YarnFileStageTestS3ITCase.class);
 
-    @Rule public final RetryRule retryRule = new RetryRule();
+    private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
 
     /** Number of tests executed. */
     private static int numRecursiveUploadTests = 0;
 
-    /** Will be updated by {@link #checkCredentialsAndSetup()} if the test is 
not skipped. */
+    /** Will be updated by {@link #checkCredentialsAndSetup(File)} if the test 
is not skipped. */
     private static boolean skipTest = true;
 
-    @BeforeClass
-    public static void checkCredentialsAndSetup() throws IOException {
+    @BeforeAll
+    static void checkCredentialsAndSetup(@TempDir File tempFolder) throws 
IOException {
         // check whether credentials exist
         S3TestCredentials.assumeCredentialsAvailable();
 
         skipTest = false;
 
-        setupCustomHadoopConfig();
+        setupCustomHadoopConfig(tempFolder);
     }
 
-    @AfterClass
-    public static void resetFileSystemConfiguration() throws IOException {
+    @AfterAll
+    public static void resetFileSystemConfiguration() {

Review Comment:
   ```suggestion
       static void resetFileSystemConfiguration() {
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/entrypoint/YarnEntrypointUtilsTest.java:
##########
@@ -132,37 +130,36 @@ public void testDynamicParameterOverloading() throws 
IOException {
         Configuration overloadedConfiguration =
                 loadConfiguration(initialConfiguration, dynamicParameters);
 
-        assertThat(
-                overloadedConfiguration.get(JobManagerOptions.JVM_METASPACE),
-                is(MemorySize.MAX_VALUE));
+        
assertThat(overloadedConfiguration.get(JobManagerOptions.JVM_METASPACE))
+                .isEqualTo(MemorySize.MAX_VALUE);
     }
 
     @Nonnull
-    private static Configuration loadConfiguration(Configuration 
initialConfiguration)
-            throws IOException {
+    private Configuration loadConfiguration(Configuration 
initialConfiguration) throws IOException {
         return loadConfiguration(initialConfiguration, new HashMap<>());
     }
 
     @Nonnull
-    private static Configuration loadConfiguration(
+    private Configuration loadConfiguration(
             Configuration initialConfiguration, Configuration 
dynamicParameters)
             throws IOException {
         return loadConfiguration(initialConfiguration, dynamicParameters, new 
HashMap<>());
     }
 
     @Nonnull
-    private static Configuration loadConfiguration(
+    private Configuration loadConfiguration(
             Configuration initialConfiguration, Map<String, String> env) 
throws IOException {
         return loadConfiguration(initialConfiguration, new Configuration(), 
env);
     }
 
     @Nonnull
-    private static Configuration loadConfiguration(

Review Comment:
   These `loadConfiguration` overloads do not touch any instance state, so they could remain `static` even after the JUnit 5 migration.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to