This is an automated email from the ASF dual-hosted git repository.

godfrey pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 7a423666d0f8452382ad5fe2635de5ad1475dd46
Author: Yuxin Tan <[email protected]>
AuthorDate: Wed Apr 19 14:27:00 2023 +0800

    [FLINK-30815][tests] Migrate BatchAbstractTestBase to junit5
    
    This closes #22427
---
 .../connectors/hive/HiveTableSourceITCase.java     |  2 +-
 .../formats/json/JsonBatchFileSystemITCase.java    |  2 +-
 .../batch/sql/CompactManagedTableITCase.java       |  2 +-
 .../planner/runtime/batch/sql/FunctionITCase.java  |  8 +++--
 .../runtime/utils/BatchAbstractTestBase.java       | 38 ++++++++++++++++------
 .../batch/sql/LegacyTableSourceITCase.scala        |  8 ++---
 .../batch/sql/PartitionableSourceITCase.scala      |  7 ++--
 .../runtime/batch/sql/TableSinkITCase.scala        | 14 ++++----
 .../runtime/batch/sql/TableSourceITCase.scala      |  8 ++---
 9 files changed, 57 insertions(+), 32 deletions(-)

diff --git a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveTableSourceITCase.java b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveTableSourceITCase.java
index adbf74e1798..4df9e473538 100644
--- a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveTableSourceITCase.java
+++ b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveTableSourceITCase.java
@@ -894,7 +894,7 @@ public class HiveTableSourceITCase extends BatchAbstractTestBase {
 
     private void testCaseInsensitive(String format) throws Exception {
         TableEnvironment tEnv = createTableEnvWithHiveCatalog(hiveCatalog);
-        String folderURI = TEMPORARY_FOLDER.newFolder().toURI().toString();
+        String folderURI = createTempFolder().toURI().toString();
 
         // Flink to write sensitive fields to parquet file
         tEnv.executeSql(
diff --git a/flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonBatchFileSystemITCase.java b/flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonBatchFileSystemITCase.java
index b2efe8c863b..ff4c77d1cf9 100644
--- a/flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonBatchFileSystemITCase.java
+++ b/flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonBatchFileSystemITCase.java
@@ -102,7 +102,7 @@ public class JsonBatchFileSystemITCase extends BatchFileSystemITCaseBase {
     }
 
     private static File generateTestData(int numRecords) throws IOException {
-        File tempDir = TEMPORARY_FOLDER.newFolder();
+        File tempDir = createTempFolder();
 
         File root = new File(tempDir, "id=0");
         root.mkdir();
diff --git a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
index c9e9e6faeaa..e5b263652a6 100644
--- a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
+++ b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/CompactManagedTableITCase.java
@@ -79,7 +79,7 @@ public class CompactManagedTableITCase extends BatchTestBase {
         try {
             rootPath =
                     new Path(
-                            new Path(TEMPORARY_FOLDER.newFolder().getPath()),
+                            new Path(createTempFolder().getPath()),
                             tableIdentifier.asSummaryString());
             rootPath.getFileSystem().mkdirs(rootPath);
         } catch (IOException e) {
diff --git a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/FunctionITCase.java b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/FunctionITCase.java
index 7d886fe4fc6..100a4042392 100644
--- a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/FunctionITCase.java
+++ b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/FunctionITCase.java
@@ -27,6 +27,8 @@ import org.apache.flink.util.UserClassLoaderJarTestUtils;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.io.File;
+import java.nio.file.Files;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Random;
@@ -48,10 +50,12 @@ public class FunctionITCase extends BatchTestBase {
     public void before() throws Exception {
         super.before();
         udfClassName = GENERATED_LOWER_UDF_CLASS + random.nextInt(50);
+        File tmpJarDir =
+                new File(createTempFolder(), String.format("test-jar-%s", UUID.randomUUID()));
+        Files.createDirectory(tmpJarDir.toPath());
         jarPath =
                 UserClassLoaderJarTestUtils.createJarFile(
-                                TEMPORARY_FOLDER.newFolder(
-                                        String.format("test-jar-%s", UUID.randomUUID())),
+                                tmpJarDir,
                                 "test-classloader-udf.jar",
                                 udfClassName,
                                 String.format(GENERATED_LOWER_UDF_CODE, udfClassName))
diff --git a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/utils/BatchAbstractTestBase.java b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/utils/BatchAbstractTestBase.java
index 6919241652d..6a6c17c0e72 100644
--- a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/utils/BatchAbstractTestBase.java
+++ b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/utils/BatchAbstractTestBase.java
@@ -22,31 +22,49 @@ import org.apache.flink.configuration.Configuration;
 import org.apache.flink.configuration.MemorySize;
 import org.apache.flink.configuration.TaskManagerOptions;
 import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
-import org.apache.flink.test.util.MiniClusterWithClientResource;
-import org.apache.flink.util.TestLogger;
+import org.apache.flink.test.junit5.MiniClusterExtension;
+import org.apache.flink.testutils.junit.utils.TempDirUtils;
 
-import org.junit.ClassRule;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.extension.RegisterExtension;
+import org.junit.jupiter.api.io.TempDir;
 
-/** Batch test base to use {@link ClassRule}. */
-public class BatchAbstractTestBase extends TestLogger {
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Path;
+
+/** Batch test base to use {@link RegisterExtension}. */
+public class BatchAbstractTestBase {
 
     public static final int DEFAULT_PARALLELISM = 3;
 
-    @ClassRule
-    public static MiniClusterWithClientResource miniClusterResource =
-            new MiniClusterWithClientResource(
+    @RegisterExtension
+    public static MiniClusterExtension miniClusterResource =
+            new MiniClusterExtension(
                     new MiniClusterResourceConfiguration.Builder()
                             .setConfiguration(getConfiguration())
                             .setNumberTaskManagers(1)
                             .setNumberSlotsPerTaskManager(DEFAULT_PARALLELISM)
                             .build());
 
-    @ClassRule public static final TemporaryFolder TEMPORARY_FOLDER = new TemporaryFolder();
+    @TempDir public static Path tmpDir;
 
     private static Configuration getConfiguration() {
         Configuration config = new Configuration();
         config.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, MemorySize.parse("100m"));
         return config;
     }
+
+    public static File createTempFolder() throws IOException {
+        return TempDirUtils.newFolder(BatchAbstractTestBase.tmpDir);
+    }
+
+    public static File createTempFile() throws IOException {
+        Path tmpDirPath = createTempFolder().toPath();
+        return TempDirUtils.newFile(tmpDirPath);
+    }
+
+    public static File createFileInTempFolder(String fileName) throws IOException {
+        Path tmpDirPath = createTempFolder().toPath();
+        return TempDirUtils.newFile(tmpDirPath, fileName);
+    }
 }
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/LegacyTableSourceITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/LegacyTableSourceITCase.scala
index c07333608f0..4ef34d8fdce 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/LegacyTableSourceITCase.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/LegacyTableSourceITCase.scala
@@ -23,7 +23,7 @@ import org.apache.flink.table.api.{DataTypes, TableSchema, Types}
 import org.apache.flink.table.api.config.TableConfigOptions
 import org.apache.flink.table.api.internal.TableEnvironmentInternal
 import org.apache.flink.table.planner.runtime.utils.{BatchTestBase, TestData}
-import org.apache.flink.table.planner.runtime.utils.BatchAbstractTestBase.TEMPORARY_FOLDER
+import org.apache.flink.table.planner.runtime.utils.BatchAbstractTestBase.{createFileInTempFolder, createTempFolder}
 import org.apache.flink.table.planner.runtime.utils.BatchTestBase.row
 import org.apache.flink.table.planner.utils.{TableTestUtil, TestDataTypeTableSource, TestFileInputFormatTableSource, TestInputFormatTableSource, TestLegacyFilterableTableSource, TestLegacyProjectableTableSource, TestNestedProjectableTableSource, TestPartitionableSourceFactory, TestTableSourceSinks}
 import org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter
@@ -311,10 +311,10 @@ class LegacyTableSourceITCase extends BatchTestBase {
 
   @Test
   def testMultiPaths(): Unit = {
-    val tmpFile1 = TEMPORARY_FOLDER.newFile("tmpFile1.tmp")
+    val tmpFile1 = createFileInTempFolder("tmpFile1.tmp")
     new FileWriter(tmpFile1).append("t1\n").append("t2\n").close()
 
-    val tmpFile2 = TEMPORARY_FOLDER.newFile("tmpFile2.tmp")
+    val tmpFile2 = createFileInTempFolder("tmpFile2.tmp")
     new FileWriter(tmpFile2).append("t3\n").append("t4\n").close()
 
     val schema = new TableSchema(Array("a"), Array(Types.STRING))
@@ -347,7 +347,7 @@ class LegacyTableSourceITCase extends BatchTestBase {
          |)
        """.stripMargin
     tEnv.executeSql(ddl)
-    val resultPath = TEMPORARY_FOLDER.newFolder().getAbsolutePath
+    val resultPath = createTempFolder().getAbsolutePath
     tEnv.executeSql(s"""
                        |CREATE TABLE MySink (
                        |  `a` BIGINT,
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/PartitionableSourceITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/PartitionableSourceITCase.scala
index ad51e49412c..8338d3d3caf 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/PartitionableSourceITCase.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/PartitionableSourceITCase.scala
@@ -19,7 +19,8 @@ package org.apache.flink.table.planner.runtime.batch.sql
 
 import org.apache.flink.table.catalog.{CatalogPartitionImpl, CatalogPartitionSpec, ObjectPath}
 import org.apache.flink.table.planner.factories.{TestValuesCatalog, TestValuesTableFactory}
-import org.apache.flink.table.planner.runtime.utils.BatchAbstractTestBase.TEMPORARY_FOLDER
+import org.apache.flink.table.planner.runtime.utils.BatchAbstractTestBase
+import org.apache.flink.table.planner.runtime.utils.BatchAbstractTestBase.createTempFolder
 import org.apache.flink.table.planner.runtime.utils.BatchTestBase
 import org.apache.flink.table.planner.runtime.utils.BatchTestBase.row
 import org.apache.flink.table.planner.utils.TestingTableEnvironment
@@ -188,10 +189,10 @@ class PartitionableSourceITCase(val sourceFetchPartitions: Boolean, val useCatal
          |   }
          |}
          |""".stripMargin
-    val tmpDir = TEMPORARY_FOLDER.newFolder()
+    val tmpJarDir = createTempFolder()
     val udfJarFile =
       UserClassLoaderJarTestUtils.createJarFile(
-        tmpDir,
+        tmpJarDir,
         "flink-test-udf.jar",
         "TrimUDF",
         udfJavaCode)
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableSinkITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableSinkITCase.scala
index b4224519faa..b2e048c80a6 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableSinkITCase.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableSinkITCase.scala
@@ -21,7 +21,8 @@ import org.apache.flink.configuration.MemorySize
 import org.apache.flink.core.testutils.FlinkMatchers
 import org.apache.flink.streaming.api.operators.collect.CollectSinkOperatorFactory
 import org.apache.flink.table.planner.factories.TestValuesTableFactory
-import org.apache.flink.table.planner.runtime.utils.{BatchAbstractTestBase, BatchTestBase}
+import org.apache.flink.table.planner.runtime.utils.BatchAbstractTestBase.createTempFolder
+import org.apache.flink.table.planner.runtime.utils.BatchTestBase
 import org.apache.flink.table.planner.runtime.utils.BatchTestBase.row
 import org.apache.flink.table.planner.runtime.utils.TestData.smallData3
 import org.apache.flink.table.planner.utils.TableTestUtil
@@ -47,7 +48,7 @@ class TableSinkITCase extends BatchTestBase {
                        |)
        """.stripMargin)
 
-    val resultPath = BatchAbstractTestBase.TEMPORARY_FOLDER.newFolder().getAbsolutePath
+    val resultPath = createTempFolder().getAbsolutePath
     tEnv.executeSql(s"""
                        |CREATE TABLE MySink (
                        |  `a` INT,
@@ -60,10 +61,10 @@ class TableSinkITCase extends BatchTestBase {
                        |)
        """.stripMargin)
     val stmtSet = tEnv.createStatementSet()
-    val newPath1 = BatchAbstractTestBase.TEMPORARY_FOLDER.newFolder().getAbsolutePath
+    val newPath1 = createTempFolder().getAbsolutePath
     stmtSet.addInsertSql(
       s"insert into MySink /*+ OPTIONS('path' = '$newPath1') */ select * from MyTable")
-    val newPath2 = BatchAbstractTestBase.TEMPORARY_FOLDER.newFolder().getAbsolutePath
+    val newPath2 = createTempFolder().getAbsolutePath
     stmtSet.addInsertSql(
       s"insert into MySink /*+ OPTIONS('path' = '$newPath2') */ select * from MyTable")
     stmtSet.execute().await()
@@ -110,7 +111,7 @@ class TableSinkITCase extends BatchTestBase {
                        |)
        """.stripMargin)
 
-    val resultPath = BatchAbstractTestBase.TEMPORARY_FOLDER.newFolder().getAbsolutePath
+    val resultPath = createTempFolder().getAbsolutePath
     tEnv
       .executeSql(s"""
                      |CREATE TABLE MyCtasTable
@@ -128,7 +129,8 @@ class TableSinkITCase extends BatchTestBase {
 
     // test statement set
     val statementSet = tEnv.createStatementSet()
-    val useStatementResultPath = BatchAbstractTestBase.TEMPORARY_FOLDER.newFolder().getAbsolutePath
+    val useStatementResultPath =
+      createTempFolder().getAbsolutePath
     statementSet.addInsertSql(s"""
                                  |CREATE TABLE MyCtasTableUseStatement
                                  | WITH (
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableSourceITCase.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableSourceITCase.scala
index e5877c913a3..70eaea46015 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableSourceITCase.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/TableSourceITCase.scala
@@ -21,7 +21,7 @@ import org.apache.flink.table.catalog.ObjectPath
 import org.apache.flink.table.planner.factories.TestValuesTableFactory
 import org.apache.flink.table.planner.plan.optimize.RelNodeBlockPlanBuilder
 import org.apache.flink.table.planner.runtime.utils.{BatchTestBase, TestData}
-import org.apache.flink.table.planner.runtime.utils.BatchAbstractTestBase.TEMPORARY_FOLDER
+import org.apache.flink.table.planner.runtime.utils.BatchAbstractTestBase.{createTempFile, createTempFolder}
 import org.apache.flink.table.planner.runtime.utils.BatchTestBase.row
 import org.apache.flink.table.planner.utils._
 import org.apache.flink.util.FileUtils
@@ -333,7 +333,7 @@ class TableSourceITCase extends BatchTestBase {
 
   @Test
   def testSourceProvider(): Unit = {
-    val file = TEMPORARY_FOLDER.newFile()
+    val file = createTempFile()
     file.delete()
     file.createNewFile()
     FileUtils.writeFileUtf8(file, "1\n5\n6")
@@ -356,7 +356,7 @@ class TableSourceITCase extends BatchTestBase {
 
   @Test
   def testTableHint(): Unit = {
-    val resultPath = TEMPORARY_FOLDER.newFolder().getAbsolutePath
+    val resultPath = createTempFolder().getAbsolutePath
     tEnv.executeSql(s"""
                        |CREATE TABLE MySink (
                        |  `a` INT,
@@ -390,7 +390,7 @@ class TableSourceITCase extends BatchTestBase {
     tEnv.getConfig.set(
       RelNodeBlockPlanBuilder.TABLE_OPTIMIZER_REUSE_OPTIMIZE_BLOCK_WITH_DIGEST_ENABLED,
       Boolean.box(true))
-    val resultPath = TEMPORARY_FOLDER.newFolder().getAbsolutePath
+    val resultPath = createTempFolder().getAbsolutePath
     tEnv.executeSql(s"""
                        |CREATE TABLE MySink (
                        |  `a` INT,

Reply via email to