This is an automated email from the ASF dual-hosted git repository.

yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new a5c531f64fe [HUDI-9513] Update test profiles to just use tags, fix current test issues (#13407)
a5c531f64fe is described below

commit a5c531f64fe1d773a1a5fed1dcf13ce219a6a741
Author: Tim Brown <[email protected]>
AuthorDate: Tue Jun 10 21:44:03 2025 -0500

    [HUDI-9513] Update test profiles to just use tags, fix current test issues (#13407)
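
    The deleted *FunctionalTestSuite classes below relied on the JUnit 4
    @RunWith(JUnitPlatform.class) runner; they are superseded by plain
    JUnit 5 tags that Surefire selects via <groups>. A minimal sketch of
    the resulting pattern (class and test names here are illustrative,
    not taken from this change):

        import org.junit.jupiter.api.Tag;
        import org.junit.jupiter.api.Test;

        // Tagging the test class directly replaces the suite wrapper;
        // Surefire picks it up via <groups>functional</groups>.
        @Tag("functional")
        class ExampleFunctionalTest {
          @Test
          void exampleScenario() {
            // functional-path assertions go here
          }
        }
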
---
 .../org/apache/hudi/ClientFunctionalTestSuite.java | 32 ---------------
 .../bucket/TestPartitionBucketIndexCalculator.java |  2 +-
 .../hudi/utils/HoodieWriterClientTestHarness.java  | 29 +++++++-------
 hudi-client/hudi-spark-client/pom.xml              |  4 ++
 .../java/org/apache/hudi/client/TestMultiFS.java   |  8 ++--
 .../functional/SparkClientFunctionalTestSuite.java | 35 -----------------
 .../hudi/testutils/HoodieClientTestUtils.java      |  2 +-
 .../hudi/testutils/providers/SparkProvider.java    |  3 +-
 .../common/model/TestHoodieCommitMetadata.java     |  4 +-
 hudi-flink-datasource/hudi-flink/pom.xml           |  5 +++
 .../lock/TestInProcessLockProvider.java            |  3 +-
 .../org/apache/hudi/io/hfile/TestHFileWriter.java  |  1 -
 .../apache/hudi/TestDataSourceReadWithDeletes.java |  9 +++--
 .../hudi/client/TestHoodieClientMultiWriter.java   | 13 ++++---
 .../TestHoodieClientOnCopyOnWriteStorage.java      |  7 +---
 .../TestHoodieClientOnMergeOnReadStorage.java      |  3 --
 .../HoodieSparkFunctionalTestSuiteA.java           | 29 --------------
 .../HoodieSparkFunctionalTestSuiteB.java           | 29 --------------
 .../hudi/functional/TestBootstrapReadBase.java     |  2 -
 ...dieSparkMergeOnReadTableInsertUpdateDelete.java |  3 ++
 .../table/functional/TestHoodieSparkRollback.java  |  4 ++
 .../TestSparkNonBlockingConcurrencyControl.java    | 16 ++++++++
 .../src/test/resources/exampleSchema.txt           |  6 +--
 .../TestIncrementalQueryWithArchivedInstants.scala | 11 +-----
 .../hudi/functional/HoodieStatsIndexTestBase.scala |  6 +--
 .../apache/hudi/functional/TestCOWDataSource.scala |  5 ++-
 .../hudi/functional/TestPartitionStatsIndex.scala  |  1 +
 .../functional/TestSecondaryIndexPruning.scala     |  1 +
 .../spark/sql/avro/TestSchemaConverters.scala      |  2 +-
 .../hudi/feature/index/TestExpressionIndex.scala   |  1 +
 .../sql/hudi/procedure/TestTTLProcedure.scala      |  1 +
 .../functional/HiveSyncFunctionalTestSuite.java    | 33 ----------------
 hudi-tests-common/pom.xml                          | 14 -------
 .../deltastreamer/TestHoodieDeltaStreamer.java     |  3 +-
 .../functional/UtilitiesFunctionalTestSuite.java   | 32 ---------------
 .../TestHoodieMultiTableServicesMain.java          | 23 +++++------
 .../utilities/testutils/UtilitiesTestBase.java     |  2 +-
 pom.xml                                            | 45 +++++++++-------------
 38 files changed, 121 insertions(+), 308 deletions(-)

diff --git a/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/ClientFunctionalTestSuite.java b/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/ClientFunctionalTestSuite.java
deleted file mode 100644
index 4e62618c8cb..00000000000
--- a/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/ClientFunctionalTestSuite.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hudi;
-
-import org.junit.platform.runner.JUnitPlatform;
-import org.junit.platform.suite.api.IncludeTags;
-import org.junit.platform.suite.api.SelectPackages;
-import org.junit.runner.RunWith;
-
-@RunWith(JUnitPlatform.class)
-@SelectPackages("org.apache.hudi.index")
-@IncludeTags("functional")
-public class ClientFunctionalTestSuite {
-
-}
diff --git a/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/index/bucket/TestPartitionBucketIndexCalculator.java b/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/index/bucket/TestPartitionBucketIndexCalculator.java
index 2157e8a5d4b..c8f8a3f4529 100644
--- a/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/index/bucket/TestPartitionBucketIndexCalculator.java
+++ b/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/index/bucket/TestPartitionBucketIndexCalculator.java
@@ -26,8 +26,8 @@ import org.apache.hudi.index.bucket.partition.PartitionBucketIndexCalculator;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.MethodSource;
 import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
 
 import java.io.IOException;
 import java.util.stream.Stream;
diff --git a/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/utils/HoodieWriterClientTestHarness.java b/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/utils/HoodieWriterClientTestHarness.java
index dabdabdeb5e..fa4540cf48e 100644
--- a/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/utils/HoodieWriterClientTestHarness.java
+++ b/hudi-client/hudi-client-common/src/test/java/org/apache/hudi/utils/HoodieWriterClientTestHarness.java
@@ -376,7 +376,7 @@ public abstract class HoodieWriterClientTestHarness extends HoodieCommonTestHarn
             .withConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder().withConsistencyCheckEnabled(true)
                     .withMaxConsistencyCheckIntervalMs(1).withInitialConsistencyCheckIntervalMs(1)
                     .withEnableOptimisticConsistencyGuard(enableOptimisticConsistencyGuard).build())
-            .build() :
+                    .withProperties(properties).build() :
             getConfigBuilder().withRollbackUsingMarkers(rollbackUsingMarkers)
                     .withConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder()
                             .withConsistencyCheckEnabled(true)
@@ -385,15 +385,16 @@ public abstract class HoodieWriterClientTestHarness extends HoodieCommonTestHarn
                     .withProperties(properties).build();
   }
 
-  protected HoodieWriteConfig getConsistencyCheckWriteConfig(boolean enableOptimisticConsistencyGuard) {
-    return !enableOptimisticConsistencyGuard ? (getConfigBuilder()
-            .withConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder().withConsistencyCheckEnabled(true)
-                    .withMaxConsistencyCheckIntervalMs(1).withInitialConsistencyCheckIntervalMs(1).withEnableOptimisticConsistencyGuard(enableOptimisticConsistencyGuard).build())
-            .build()) : (getConfigBuilder()
-            .withConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder().withConsistencyCheckEnabled(true)
-                    .withEnableOptimisticConsistencyGuard(enableOptimisticConsistencyGuard)
-                    .withOptimisticConsistencyGuardSleepTimeMs(1).build())
-            .build());
+  protected HoodieWriteConfig getConsistencyCheckWriteConfig(boolean enableOptimisticConsistencyGuard, boolean populateMetaFields) {
+    HoodieWriteConfig.Builder builder = getConfigBuilder().withProperties(getPropertiesForKeyGen(populateMetaFields));
+    return !enableOptimisticConsistencyGuard ? (builder
+        .withConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder().withConsistencyCheckEnabled(true)
+            .withMaxConsistencyCheckIntervalMs(1).withInitialConsistencyCheckIntervalMs(1).withEnableOptimisticConsistencyGuard(enableOptimisticConsistencyGuard).build())
+        .build()) : (builder
+        .withConsistencyGuardConfig(ConsistencyGuardConfig.newBuilder().withConsistencyCheckEnabled(true)
+            .withEnableOptimisticConsistencyGuard(enableOptimisticConsistencyGuard)
+            .withOptimisticConsistencyGuardSleepTimeMs(1).build())
+        .build());
   }
 
  protected HoodieWriteConfig getParallelWritingWriteConfig(HoodieFailedWritesCleaningPolicy cleaningPolicy, boolean populateMetaFields) {
@@ -708,11 +709,11 @@ public abstract class HoodieWriterClientTestHarness extends HoodieCommonTestHarn
   }
 
  protected Pair<StoragePath, List<WriteStatus>> testConsistencyCheck(HoodieEngineContext context, HoodieTableMetaClient metaClient,
-                                                                    String instantTime, boolean enableOptimisticConsistencyGuard,
+                                                                    String instantTime, boolean enableOptimisticConsistencyGuard, boolean populateMetaFields,
                                                                     Function2<HoodieTable, HoodieTableMetaClient, HoodieWriteConfig> getHoodieTableFn,
                                                                     Function transformInputFn, Function transformOutputFn)
           throws Exception {
-    HoodieWriteConfig cfg = getConsistencyCheckWriteConfig(enableOptimisticConsistencyGuard);
+    HoodieWriteConfig cfg = getConsistencyCheckWriteConfig(enableOptimisticConsistencyGuard, populateMetaFields);
     BaseHoodieWriteClient client = getHoodieWriteClient(cfg);
 
     WriteClientTestUtils.startCommitWithTime(client, instantTime);
@@ -764,7 +765,7 @@ public abstract class HoodieWriterClientTestHarness extends HoodieCommonTestHarn
     BaseHoodieWriteClient client = getHoodieWriteClient(cfg);
     client.setOperationType(WriteOperationType.UPSERT);
    Pair<StoragePath, List<WriteStatus>> result = testConsistencyCheck(context, metaClient, instantTime,
-              enableOptimisticConsistencyGuard, getHoodieTableFn, transformInputFn, transformOutputFn);
+              enableOptimisticConsistencyGuard, true, getHoodieTableFn, transformInputFn, transformOutputFn);
 
     // Delete orphan marker and commit should succeed
     metaClient.getStorage().deleteFile(result.getKey());
@@ -788,7 +789,7 @@ public abstract class HoodieWriterClientTestHarness extends HoodieCommonTestHarn
     String instantTime = "00000000000010";
    HoodieWriteConfig cfg = getRollbackMarkersAndConsistencyGuardWriteConfig(rollbackUsingMarkers, enableOptimisticConsistencyGuard, populateMetaFields);
     BaseHoodieWriteClient client = getHoodieWriteClient(cfg);
-    testConsistencyCheck(context, metaClient, instantTime, enableOptimisticConsistencyGuard, getHoodieTableFn, transformInputFn, transformOutputFn);
+    testConsistencyCheck(context, metaClient, instantTime, enableOptimisticConsistencyGuard, populateMetaFields, getHoodieTableFn, transformInputFn, transformOutputFn);
     rollbackAndAssert(enableOptimisticConsistencyGuard, instantTime, metaClient, client);
   }
 
diff --git a/hudi-client/hudi-spark-client/pom.xml b/hudi-client/hudi-spark-client/pom.xml
index 9a1b1d635d9..fb65be6af2c 100644
--- a/hudi-client/hudi-spark-client/pom.xml
+++ b/hudi-client/hudi-spark-client/pom.xml
@@ -181,6 +181,10 @@
           <groupId>org.pentaho</groupId>
           <artifactId>*</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>junit</groupId>
+          <artifactId>junit</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
diff --git a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/TestMultiFS.java b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/TestMultiFS.java
index a5be3cf76bb..823fac6a5e7 100644
--- a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/TestMultiFS.java
+++ b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/TestMultiFS.java
@@ -131,7 +131,7 @@ public class TestMultiFS extends HoodieSparkClientTestHarness {
 
       // Write generated data to hdfs (only inserts)
       String readCommitTime = hdfsWriteClient.startCommit();
-      LOG.info("Starting commit " + readCommitTime);
+      LOG.info("Starting commit {}", readCommitTime);
       List<HoodieRecord> records = dataGen.generateInserts(readCommitTime, 10);
       JavaRDD<HoodieRecord> writeRecords = jsc.parallelize(records, 2);
      JavaRDD<WriteStatus> writeStatusJavaRDD = hdfsWriteClient.upsert(writeRecords, readCommitTime);
@@ -152,14 +152,14 @@ public class TestMultiFS extends HoodieSparkClientTestHarness {
           .initTable(storageConf.newInstance(), tablePath);
 
       String writeCommitTime = localWriteClient.startCommit();
-      LOG.info("Starting write commit " + writeCommitTime);
+      LOG.info("Starting write commit {}", writeCommitTime);
      List<HoodieRecord> localRecords = dataGen.generateInserts(writeCommitTime, 10);
      JavaRDD<HoodieRecord> localWriteRecords = jsc.parallelize(localRecords, 2);
-      LOG.info("Writing to path: " + tablePath);
+      LOG.info("Writing to path: {}", tablePath);
      writeStatusJavaRDD = localWriteClient.upsert(localWriteRecords, writeCommitTime);
      localWriteClient.commit(writeCommitTime, writeStatusJavaRDD, Option.empty(), COMMIT_ACTION, Collections.emptyMap(), Option.empty());
 
-      LOG.info("Reading from path: " + tablePath);
+      LOG.info("Reading from path: {}", tablePath);
      fs = HadoopFSUtils.getFs(tablePath, HoodieTestUtils.getDefaultStorageConf());
      metaClient = HoodieTestUtils.createMetaClient(new HadoopStorageConfiguration(fs.getConf()), tablePath);
      timeline = TIMELINE_FACTORY.createActiveTimeline(metaClient).getCommitAndReplaceTimeline();
diff --git a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/functional/SparkClientFunctionalTestSuite.java b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/functional/SparkClientFunctionalTestSuite.java
deleted file mode 100644
index 5b20a51f5a2..00000000000
--- a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/functional/SparkClientFunctionalTestSuite.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hudi.functional;
-
-import org.junit.platform.runner.JUnitPlatform;
-import org.junit.platform.suite.api.IncludeTags;
-import org.junit.platform.suite.api.SelectPackages;
-import org.junit.runner.RunWith;
-
-@RunWith(JUnitPlatform.class)
-@SelectPackages({
-    "org.apache.hudi.client.functional",
-    "org.apache.hudi.table.functional",
-    "org.apache.hudi.index.hbase"})
-@IncludeTags("functional")
-public class SparkClientFunctionalTestSuite {
-
-}
diff --git a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/HoodieClientTestUtils.java b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/HoodieClientTestUtils.java
index 56d5b26ef31..07bfad1f787 100644
--- a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/HoodieClientTestUtils.java
+++ b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/HoodieClientTestUtils.java
@@ -92,7 +92,7 @@ public class HoodieClientTestUtils {
    */
   public static SparkConf getSparkConfForTest(String appName) {
     SparkConf sparkConf = new SparkConf().setAppName(appName)
-        .setMaster("local[8]")
+        .setMaster("local[8,1]")
         .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
         .set("spark.kryo.registrator", 
"org.apache.spark.HoodieSparkKryoRegistrar")
         .set("spark.sql.shuffle.partitions", "4")
diff --git a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/providers/SparkProvider.java b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/providers/SparkProvider.java
index 91045034e5f..184450d0101 100644
--- a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/providers/SparkProvider.java
+++ b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/providers/SparkProvider.java
@@ -38,7 +38,7 @@ public interface SparkProvider extends org.apache.hudi.testutils.providers.Hoodi
   default SparkConf conf(Map<String, String> overwritingConfigs) {
     SparkConf sparkConf = new SparkConf();
     sparkConf.set("spark.app.name", getClass().getName());
-    sparkConf.set("spark.master", "local[8]");
+    sparkConf.set("spark.master", "local[8,1]");
     sparkConf.set("spark.default.parallelism", "4");
     sparkConf.set("spark.sql.shuffle.partitions", "4");
     sparkConf.set("spark.driver.maxResultSize", "2g");
@@ -48,6 +48,7 @@ public interface SparkProvider extends org.apache.hudi.testutils.providers.Hoodi
     sparkConf.set("spark.hadoop.mapred.output.compression.type", "BLOCK");
     sparkConf.set("spark.serializer", 
"org.apache.spark.serializer.KryoSerializer");
     sparkConf.set("spark.kryo.registrator", 
"org.apache.spark.HoodieSparkKryoRegistrar");
+    sparkConf.set("spark.ui.enabled", "false");
     overwritingConfigs.forEach(sparkConf::set);
     return sparkConf;
   }
diff --git a/hudi-common/src/test/java/org/apache/hudi/common/model/TestHoodieCommitMetadata.java b/hudi-common/src/test/java/org/apache/hudi/common/model/TestHoodieCommitMetadata.java
index f597524f9fe..0045c1b4b12 100644
--- a/hudi-common/src/test/java/org/apache/hudi/common/model/TestHoodieCommitMetadata.java
+++ b/hudi-common/src/test/java/org/apache/hudi/common/model/TestHoodieCommitMetadata.java
@@ -182,13 +182,13 @@ public class TestHoodieCommitMetadata {
    HoodieInstant legacyInstant = INSTANT_GENERATOR.createNewInstant(HoodieInstant.State.COMPLETED, "commit", "1", "1", true);
     CommitMetadataSerDe v1SerDe = new CommitMetadataSerDeV1();
     byte[] v1Bytes = convertMetadataToByteArray(commitMetadata1, v1SerDe);
-    System.out.println(new String(v1Bytes));
+
     org.apache.hudi.common.model.HoodieCommitMetadata commitMetadata2 =
        COMMIT_METADATA_SER_DE.deserialize(legacyInstant, new ByteArrayInputStream(v1Bytes), () -> false, org.apache.hudi.common.model.HoodieCommitMetadata.class);
     assertEquals(2, commitMetadata2.partitionToWriteStats.size());
    assertEquals(2, commitMetadata2.partitionToWriteStats.get("partition1").size());
    assertEquals(2, commitMetadata2.partitionToWriteStats.get("partition1").size());
-    System.out.println(commitMetadata2.partitionToWriteStats.get("partition1").get(0));
+
     assertEquals("111", 
commitMetadata2.partitionToWriteStats.get("partition1").get(0).getFileId());
     assertEquals("222", 
commitMetadata2.partitionToWriteStats.get("partition1").get(1).getFileId());
     assertEquals("333", 
commitMetadata2.partitionToWriteStats.get("partition2").get(0).getFileId());
diff --git a/hudi-flink-datasource/hudi-flink/pom.xml b/hudi-flink-datasource/hudi-flink/pom.xml
index 97d1fede802..dcdf8a67c05 100644
--- a/hudi-flink-datasource/hudi-flink/pom.xml
+++ b/hudi-flink-datasource/hudi-flink/pom.xml
@@ -349,6 +349,11 @@
             </exclusions>
         </dependency>
         <!-- Test dependencies -->
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
         <dependency>
             <groupId>org.apache.hudi</groupId>
             <artifactId>hudi-tests-common</artifactId>
diff --git a/hudi-hadoop-common/src/test/java/org/apache/hudi/client/transaction/lock/TestInProcessLockProvider.java b/hudi-hadoop-common/src/test/java/org/apache/hudi/client/transaction/lock/TestInProcessLockProvider.java
index 47c06e43b9e..c7056374471 100644
--- a/hudi-hadoop-common/src/test/java/org/apache/hudi/client/transaction/lock/TestInProcessLockProvider.java
+++ b/hudi-hadoop-common/src/test/java/org/apache/hudi/client/transaction/lock/TestInProcessLockProvider.java
@@ -24,7 +24,6 @@ import org.apache.hudi.common.config.TypedProperties;
 import org.apache.hudi.exception.HoodieLockException;
 import org.apache.hudi.storage.StorageConfiguration;
 
-import junit.framework.AssertionFailedError;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
@@ -123,7 +122,7 @@ public class TestInProcessLockProvider {
       boolean isLocked = lockProvider3.getLock().isWriteLocked();
       if (!isLocked) {
         writer3TryLock.set(true);
-        throw new AssertionFailedError("The lock instance in Writer 3 should be held by Writer 2: "
+        throw new RuntimeException("The lock instance in Writer 3 should be held by Writer 2: "
             + lockProvider3.getLock());
       }
       assertDoesNotThrow(() -> {
diff --git a/hudi-io/src/test/java/org/apache/hudi/io/hfile/TestHFileWriter.java b/hudi-io/src/test/java/org/apache/hudi/io/hfile/TestHFileWriter.java
index a90c582c51f..fe9cf62dfbb 100644
--- a/hudi-io/src/test/java/org/apache/hudi/io/hfile/TestHFileWriter.java
+++ b/hudi-io/src/test/java/org/apache/hudi/io/hfile/TestHFileWriter.java
@@ -142,7 +142,6 @@ class TestHFileWriter {
       reader.seekTo();
       for (int i = 0; i < 50; i++) {
         KeyValue kv = reader.getKeyValue().get();
-        System.out.println(kv.getKey().getContentInString());
         assertArrayEquals(
             String.format("key%02d", i).getBytes(),
             kv.getKey().getContentInString().getBytes());
diff --git a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/TestDataSourceReadWithDeletes.java b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/TestDataSourceReadWithDeletes.java
index 3c9308c106c..d0d7967a2d8 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/TestDataSourceReadWithDeletes.java
+++ b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/TestDataSourceReadWithDeletes.java
@@ -76,7 +76,7 @@ public class TestDataSourceReadWithDeletes extends SparkClientFunctionalTestHarn
       + "    {\"name\": \"name\", \"type\": [\"null\", \"string\"]},\n"
       + "    {\"name\": \"age\", \"type\": [\"null\", \"int\"]},\n"
       + "    {\"name\": \"ts\", \"type\": [\"null\", \"long\"]},\n"
-      + "    {\"name\": \"part\", \"type\": [\"null\", \"string\"]}\n"
+      + "    {\"name\": \"partition_path\", \"type\": [\"null\", 
\"string\"]}\n"
       + "  ]\n"
       + "}";
 
@@ -111,13 +111,14 @@ public class TestDataSourceReadWithDeletes extends SparkClientFunctionalTestHarn
     List<Row> rows = spark().read().format("org.apache.hudi")
         .option("hoodie.datasource.query.type", "snapshot")
         .load(config.getBasePath())
-        .select("id", "name", "age", "ts", "part")
+        .select("id", "name", "age", "ts", "partition_path")
         .collectAsList();
     assertEquals(2, rows.size());
     String[] expected = new String[] {
         "[id1,Danny,30,2,par1]",
         "[id3,Julian,40,2,par1]"};
    assertArrayEquals(expected, rows.stream().map(Row::toString).sorted().toArray(String[]::new));
+    client.close();
   }
 
   private HoodieWriteConfig createHoodieWriteConfig() {
@@ -171,10 +172,10 @@ public class TestDataSourceReadWithDeletes extends SparkClientFunctionalTestHarn
       record.put("name", parts[2]);
       record.put("age", Integer.parseInt(parts[3]));
       record.put("ts", Long.parseLong(parts[4]));
-      record.put("part", parts[5]);
+      record.put("partition_path", parts[5]);
      OverwriteWithLatestAvroPayload payload = new OverwriteWithLatestAvroPayload(record, (Long) record.get("ts"));
      return new HoodieAvroRecord<>(
-          new HoodieKey((String) record.get("id"), (String) record.get("part")),
+          new HoodieKey((String) record.get("id"), (String) record.get("partition_path")),
           payload,
           isDelete ? HoodieOperation.DELETE : HoodieOperation.INSERT);
     }).collect(Collectors.toList());
diff --git a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/client/TestHoodieClientMultiWriter.java b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/client/TestHoodieClientMultiWriter.java
index 328457ac8be..1d3d30c8593 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/client/TestHoodieClientMultiWriter.java
+++ b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/client/TestHoodieClientMultiWriter.java
@@ -305,12 +305,13 @@ public class TestHoodieClientMultiWriter extends HoodieClientTestBase {
       // to save the checkpoint.
      HoodieWriteConfig writeConfig22 = HoodieWriteConfig.newBuilder().withProperties(writeConfig.getProps()).build();
       writeConfig22.setSchema("\"null\"");
-      final SparkRDDWriteClient client22 = getHoodieWriteClient(writeConfig22);
-      JavaRDD<HoodieRecord> emptyRDD = jsc.emptyRDD();
-      // Perform upsert with empty RDD
-      WriteClientTestUtils.startCommitWithTime(client22, "0013");
-      JavaRDD<WriteStatus> writeStatusRDD = client22.upsert(emptyRDD, "0013");
-      client22.commit("0013", writeStatusRDD);
+      try (final SparkRDDWriteClient client22 = getHoodieWriteClient(writeConfig22)) {
+        JavaRDD<HoodieRecord> emptyRDD = jsc.emptyRDD();
+        // Perform upsert with empty RDD
+        WriteClientTestUtils.startCommitWithTime(client22, "0013");
+        JavaRDD<WriteStatus> writeStatusRDD = client22.upsert(emptyRDD, "0013");
+        client22.commit("0013", writeStatusRDD);
+      }
       totalCommits += 1;
 
       // Validate table schema in the end.
diff --git a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnCopyOnWriteStorage.java b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnCopyOnWriteStorage.java
index 879ef15bda1..83678e1d7f1 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnCopyOnWriteStorage.java
+++ b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnCopyOnWriteStorage.java
@@ -152,11 +152,8 @@ import static org.junit.jupiter.api.Assertions.fail;
 @Tag("functional")
 public class TestHoodieClientOnCopyOnWriteStorage extends HoodieClientTestBase {
 
-  private static final Map<String, String> STRATEGY_PARAMS = new HashMap<String, String>() {
-    {
-      put("sortColumn", "record_key");
-    }
-  };
+  private static final Map<String, String> STRATEGY_PARAMS = Collections.singletonMap("sortColumn", "record_key");
+
+
   private static Stream<Arguments> smallInsertHandlingParams() {
     return Arrays.stream(new Boolean[][] {{true}, {false}}).map(Arguments::of);
   }
diff --git a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnMergeOnReadStorage.java b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnMergeOnReadStorage.java
index 1b57a93eb6c..655cee23a96 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnMergeOnReadStorage.java
+++ b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/client/functional/TestHoodieClientOnMergeOnReadStorage.java
@@ -32,7 +32,6 @@ import org.apache.hudi.common.table.timeline.HoodieInstant;
 import org.apache.hudi.common.table.timeline.HoodieTimeline;
 import org.apache.hudi.common.table.view.SyncableFileSystemView;
 import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
-import org.apache.hudi.common.testutils.HoodieTestTable;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.config.HoodieArchivalConfig;
 import org.apache.hudi.config.HoodieCleanConfig;
@@ -70,8 +69,6 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class TestHoodieClientOnMergeOnReadStorage extends HoodieClientTestBase {
 
-  private HoodieTestTable testTable;
-
   @BeforeEach
   public void setUpTestTable() {
     testTable = HoodieSparkWriteableTestTable.of(metaClient);
diff --git a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuiteA.java b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuiteA.java
deleted file mode 100644
index 84bddd30c6e..00000000000
--- a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuiteA.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hudi.functional;
-
-import org.junit.platform.runner.JUnitPlatform;
-import org.junit.platform.suite.api.IncludeTags;
-import org.junit.platform.suite.api.SelectPackages;
-import org.junit.runner.RunWith;
-
-@RunWith(JUnitPlatform.class)
-@SelectPackages("org.apache.hudi.functional")
-@IncludeTags("functional")
-public class HoodieSparkFunctionalTestSuiteA {
-}
diff --git a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuiteB.java b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuiteB.java
deleted file mode 100644
index b515bbf4f41..00000000000
--- a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/HoodieSparkFunctionalTestSuiteB.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hudi.functional;
-
-import org.junit.platform.runner.JUnitPlatform;
-import org.junit.platform.suite.api.IncludeTags;
-import org.junit.platform.suite.api.SelectPackages;
-import org.junit.runner.RunWith;
-
-@RunWith(JUnitPlatform.class)
-@SelectPackages("org.apache.hudi.functional")
-@IncludeTags("functional-b")
-public class HoodieSparkFunctionalTestSuiteB {
-}
diff --git a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestBootstrapReadBase.java b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestBootstrapReadBase.java
index 6103e0fd076..d7d4cffc1ca 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestBootstrapReadBase.java
+++ b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestBootstrapReadBase.java
@@ -41,7 +41,6 @@ import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.functions;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.io.TempDir;
 
 import java.io.IOException;
@@ -54,7 +53,6 @@ import static org.apache.hudi.common.model.HoodieTableType.MERGE_ON_READ;
 import static org.apache.hudi.common.testutils.RawTripTestPayload.recordToString;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
-@Tag("functional")
 public abstract class TestBootstrapReadBase extends HoodieSparkClientTestBase {
 
   @TempDir
diff --git a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/table/functional/TestHoodieSparkMergeOnReadTableInsertUpdateDelete.java b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/table/functional/TestHoodieSparkMergeOnReadTableInsertUpdateDelete.java
index 1d74120624e..57ffb783d67 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/table/functional/TestHoodieSparkMergeOnReadTableInsertUpdateDelete.java
+++ b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/table/functional/TestHoodieSparkMergeOnReadTableInsertUpdateDelete.java
@@ -22,6 +22,7 @@ package org.apache.hudi.table.functional;
 import org.apache.hudi.client.SparkRDDWriteClient;
 import org.apache.hudi.client.WriteClientTestUtils;
 import org.apache.hudi.client.WriteStatus;
+import org.apache.hudi.common.config.HoodieReaderConfig;
 import org.apache.hudi.common.fs.FSUtils;
 import org.apache.hudi.common.model.FileSlice;
 import org.apache.hudi.common.model.HoodieAvroIndexedRecord;
@@ -79,6 +80,7 @@ import org.junit.jupiter.params.provider.ValueSource;
 import java.io.IOException;
 import java.net.URISyntaxException;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -115,6 +117,7 @@ public class TestHoodieSparkMergeOnReadTableInsertUpdateDelete extends SparkClie
 
     HoodieWriteConfig.Builder cfgBuilder = getConfigBuilder(true);
     addConfigsForPopulateMetaFields(cfgBuilder, populateMetaFields);
+    cfgBuilder.withProps(Collections.singletonMap(HoodieReaderConfig.FILE_GROUP_READER_ENABLED.key(), Boolean.toString(fileFormat != HoodieFileFormat.HFILE)));
     HoodieWriteConfig cfg = cfgBuilder.build();
     try (SparkRDDWriteClient client = getHoodieWriteClient(cfg)) {
 
diff --git a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/table/functional/TestHoodieSparkRollback.java b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/table/functional/TestHoodieSparkRollback.java
index 1f4416ea354..1ac10ca83bd 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/table/functional/TestHoodieSparkRollback.java
+++ b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/table/functional/TestHoodieSparkRollback.java
@@ -188,6 +188,7 @@ public class TestHoodieSparkRollback extends SparkClientFunctionalTestHarness {
     if (failRollback) {
       copyOut(tableType, "002");
       //disable MDT so we don't copy it
+      client.close();
       client = getHoodieWriteClient(getConfigToTestMDTRollbacks(true, false));
       assertTrue(client.rollback("002", "003"));
       metaClient = HoodieTableMetaClient.reload(metaClient);
@@ -205,6 +206,7 @@ public class TestHoodieSparkRollback extends SparkClientFunctionalTestHarness {
     //now we are at a state that we would be at if a write failed after writing to MDT but before commit is finished
 
     //New update will trigger rollback and we will commit this time
+    client.close();
     client = getHoodieWriteClient(getConfigToTestMDTRollbacks(true, true));
     updateRecords(client, dataGen, "004", records);
     //validate that metadata table file listing matches reality
@@ -212,6 +214,7 @@ public class TestHoodieSparkRollback extends SparkClientFunctionalTestHarness {
     TestHoodieBackedMetadata.validateMetadata(cfg, Option.empty(), hoodieStorage(), basePath, metaClient, storageConf(), new HoodieSparkEngineContext(jsc()),
         TestHoodieBackedMetadata.metadata(client, hoodieStorage()), client, HoodieTimer.start());
     assertEquals(HoodieTableVersion.SIX, metaClient.getTableConfig().getTableVersion());
+    client.close();
   }
 
  private void copyOut(HoodieTableType tableType, String commitTime) throws IOException {
@@ -284,6 +287,7 @@ public class TestHoodieSparkRollback extends SparkClientFunctionalTestHarness {
     TestHoodieBackedMetadata.validateMetadata(cfg, Option.empty(), hoodieStorage(), basePath, metaClient,
         storageConf(), new HoodieSparkEngineContext(jsc()), TestHoodieBackedMetadata.metadata(client, hoodieStorage()), client, HoodieTimer.start());
     assertEquals(HoodieTableVersion.SIX, metaClient.getTableConfig().getTableVersion());
+    client.close();
   }
 
   /**
diff --git a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/table/functional/TestSparkNonBlockingConcurrencyControl.java b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/table/functional/TestSparkNonBlockingConcurrencyControl.java
index 44dc8363b55..eb400e13d1a 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/table/functional/TestSparkNonBlockingConcurrencyControl.java
+++ b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/table/functional/TestSparkNonBlockingConcurrencyControl.java
@@ -167,6 +167,8 @@ public class TestSparkNonBlockingConcurrencyControl extends SparkClientFunctiona
     // result is [(id1,Danny,23,2,par1)]
     Map<String, String> result = Collections.singletonMap("par1", "[id1,par1,id1,Danny,23,2,par1]");
     checkWrittenData(result, 1);
+    client1.close();
+    client2.close();
   }
 
   @Test
@@ -216,6 +218,8 @@ public class TestSparkNonBlockingConcurrencyControl extends SparkClientFunctiona
     // the data files belongs 3rd commit is not included in the last compaction.
     Map<String, String> result = Collections.singletonMap("par1", "[id1,par1,id1,Danny,null,1,par1]");
     checkWrittenData(result, 1);
+    client1.close();
+    client2.close();
   }
 
  // Validate that multiple writers will only produce base files for bulk insert
@@ -345,6 +349,8 @@ public class TestSparkNonBlockingConcurrencyControl extends SparkClientFunctiona
     // result is [(id1,Danny,23,2,par1)]
     Map<String, String> result = Collections.singletonMap("par1", "[id1,par1,id1,Danny,23,2,par1]");
     checkWrittenData(result, 1);
+    client1.close();
+    client2.close();
   }
 
   /**
@@ -391,6 +397,8 @@ public class TestSparkNonBlockingConcurrencyControl extends SparkClientFunctiona
           Option.empty(),
           metaClient.getCommitActionType());
     });
+    client1.close();
+    client2.close();
   }
 
   /**
@@ -437,6 +445,8 @@ public class TestSparkNonBlockingConcurrencyControl extends SparkClientFunctiona
           Option.empty(),
           metaClient.getCommitActionType());
     });
+    client1.close();
+    client2.close();
   }
 
   /**
@@ -491,6 +501,8 @@ public class TestSparkNonBlockingConcurrencyControl extends SparkClientFunctiona
     // result is [(id1,Danny,23,2,par1)]
     Map<String, String> result = Collections.singletonMap("par1", "[id1,par1,id1,Danny,23,2,par1]");
     checkWrittenData(result, 1);
+    client1.close();
+    client2.close();
   }
 
   /**
@@ -545,6 +557,8 @@ public class TestSparkNonBlockingConcurrencyControl extends SparkClientFunctiona
     // result is [(id1,Danny,23,2,par1)]
     Map<String, String> result = Collections.singletonMap("par1", "[id1,par1,id1,Danny,23,2,par1]");
     checkWrittenData(result, 1);
+    client1.close();
+    client2.close();
   }
 
   /**
@@ -585,6 +599,8 @@ public class TestSparkNonBlockingConcurrencyControl extends SparkClientFunctiona
     // result is [(id1,Danny,23,2,par1)]
     Map<String, String> result = Collections.singletonMap("par1", "[id1,par1,id1,Danny,23,2,par1]");
     checkWrittenData(result, 1);
+    client1.close();
+    client2.close();
   }
 
   private HoodieWriteConfig createHoodieWriteConfig() {
diff --git a/hudi-spark-datasource/hudi-spark/src/test/resources/exampleSchema.txt b/hudi-spark-datasource/hudi-spark/src/test/resources/exampleSchema.txt
index a311dc68543..9266c2df27b 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/resources/exampleSchema.txt
+++ b/hudi-spark-datasource/hudi-spark/src/test/resources/exampleSchema.txt
@@ -30,12 +30,12 @@
         },
         {
             "name": "ts",
-            "type": ["long", "null"]
+            "type": ["null", "long"]
         },
         {
             "name": "_hoodie_is_deleted",
-            "type": ["boolean", "null"],
-            "default" : false
+            "type": ["null", "boolean"],
+            "default" : null
         }
     ]
 }
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/TestIncrementalQueryWithArchivedInstants.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/TestIncrementalQueryWithArchivedInstants.scala
index 8066233c7b7..966ac99d9a6 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/TestIncrementalQueryWithArchivedInstants.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/TestIncrementalQueryWithArchivedInstants.scala
@@ -28,23 +28,16 @@ import org.apache.hudi.testutils.SparkClientFunctionalTestHarness
 
 import org.apache.spark.sql.{Row, SaveMode}
 import org.apache.spark.sql.types.{LongType, StringType, StructField, StructType}
-import org.junit.{Before, Test}
+import org.junit.jupiter.api.{BeforeEach, Tag, Test}
 import org.junit.jupiter.api.Assertions.{assertDoesNotThrow, assertFalse}
-import org.junit.jupiter.api.Tag
 import org.junit.jupiter.api.function.Executable
 
-import java.nio.file.{Files, Path}
-
 @Tag("functional")
 class TestIncrementalQueryWithArchivedInstants extends SparkClientFunctionalTestHarness {
-  var tmpDir: Path = _
   var tblPath: String = _
 
-  override def basePath(): String = tmpDir.toAbsolutePath.toUri.toString
-
-  @Before
+  @BeforeEach
   def setUp(): Unit = {
-    tmpDir = Files.createTempDirectory("hudi_random")
     tblPath = basePath()
     super.runBeforeEach()
   }
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/HoodieStatsIndexTestBase.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/HoodieStatsIndexTestBase.scala
index 87aa1a961ee..3cf0a260c01 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/HoodieStatsIndexTestBase.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/HoodieStatsIndexTestBase.scala
@@ -98,7 +98,6 @@ class HoodieStatsIndexTestBase extends HoodieSparkClientTestBase {
     val lastInstant = getLastInstant()
     if (enforce || metaClient.getActiveTimeline.lastInstant().isEmpty
      || metaClient.getActiveTimeline.lastInstant().get().requestedTime.compareTo(lastInstant) < 0) {
-      println("Reloaded timeline")
       metaClient.reloadActiveTimeline()
       metaClient
     }
@@ -127,8 +126,9 @@ class HoodieStatsIndexTestBase extends HoodieSparkClientTestBase {
       mergedDfList = mergedDfList.take(mergedDfList.size - 1)
     }
     val writeConfig = getWriteConfig(hudiOpts)
-    new SparkRDDWriteClient(new HoodieSparkEngineContext(jsc), writeConfig)
-      .rollback(lastInstant.requestedTime)
+    val client = new SparkRDDWriteClient(new HoodieSparkEngineContext(jsc), writeConfig)
+    client.rollback(lastInstant.requestedTime)
+    client.close()
 
     if (lastInstant.getAction != ActionType.clean.name()) {
      assertEquals(ActionType.rollback.name(), getLatestMetaClient(true).getActiveTimeline.lastInstant().get().getAction)
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSource.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSource.scala
index 972b5502e51..874aa9c2c98 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSource.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestCOWDataSource.scala
@@ -1875,8 +1875,9 @@ class TestCOWDataSource extends HoodieSparkClientTestBase with ScalaAssertionSup
             metaClient.getActiveTimeline.getLastClusteringInstant.get)
         }
         // This should not schedule any new clustering
-        new SparkRDDWriteClient(context, writeConfig)
-          .scheduleClustering(org.apache.hudi.common.util.Option.of(Map[String, String]().asJava))
+        val client = new SparkRDDWriteClient(context, writeConfig)
+        client.scheduleClustering(org.apache.hudi.common.util.Option.of(Map[String, String]().asJava))
+        client.close()
         assertEquals(lastInstant.requestedTime,
           metaClient.reloadActiveTimeline.getCommitsTimeline.lastInstant.get.requestedTime)
       }
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestPartitionStatsIndex.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestPartitionStatsIndex.scala
index 4f604678643..0309b4473a8 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestPartitionStatsIndex.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestPartitionStatsIndex.scala
@@ -421,6 +421,7 @@ class TestPartitionStatsIndex extends PartitionStatsIndexTestBase {
     // Do a savepoint
    val writeClient = new SparkRDDWriteClient(new HoodieSparkEngineContext(jsc), getWriteConfig(hudiOpts))
    writeClient.savepoint(firstCompletedInstant.get().requestedTime, "testUser", "savepoint to first commit")
+    writeClient.close()
    val savepointTimestamp = metaClient.reloadActiveTimeline().getSavePointTimeline.filterCompletedInstants().lastInstant().get().requestedTime
     assertEquals(firstCompletedInstant.get().requestedTime, savepointTimestamp)
     // Restore to savepoint
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestSecondaryIndexPruning.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestSecondaryIndexPruning.scala
index 799faece7bd..a6894c6b928 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestSecondaryIndexPruning.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/hudi/functional/TestSecondaryIndexPruning.scala
@@ -1318,6 +1318,7 @@ class TestSecondaryIndexPruning extends SparkClientFunctionalTestHarness {
     val firstCompletedInstant = metaClient.getActiveTimeline.getCommitsTimeline.filterCompletedInstants().lastInstant()
     val writeClient = new SparkRDDWriteClient(new HoodieSparkEngineContext(jsc), getWriteConfig(hudiOpts))
     writeClient.savepoint(firstCompletedInstant.get().requestedTime, "testUser", "savepoint to first commit")
+    writeClient.close()
     val savepointTimestamp = metaClient.reloadActiveTimeline().getSavePointTimeline.filterCompletedInstants().lastInstant().get().requestedTime
     assertEquals(firstCompletedInstant.get().requestedTime, savepointTimestamp)
     // Restore to savepoint
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/avro/TestSchemaConverters.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/avro/TestSchemaConverters.scala
index 45da6ad4d05..021a3e4afb3 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/avro/TestSchemaConverters.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/avro/TestSchemaConverters.scala
@@ -20,8 +20,8 @@ package org.apache.spark.sql.avro
 import org.apache.hudi.avro.model.HoodieMetadataColumnStats
 
 import org.apache.spark.sql.avro.SchemaConverters.SchemaType
-import org.junit.Test
 import org.junit.jupiter.api.Assertions.assertEquals
+import org.junit.jupiter.api.Test
 
 class TestSchemaConverters {
 
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/feature/index/TestExpressionIndex.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/feature/index/TestExpressionIndex.scala
index 5ea40b85592..f3960b674b5 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/feature/index/TestExpressionIndex.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/feature/index/TestExpressionIndex.scala
@@ -1981,6 +1981,7 @@ class TestExpressionIndex extends HoodieSparkSqlTestBase {
         .withMetadataIndexColumnStats(false).withMetadataIndexPartitionStats(false).build())
       val writeClient = new SparkRDDWriteClient(new HoodieSparkEngineContext(new JavaSparkContext(spark.sparkContext)), configBuilder.build())
       writeClient.rollback(lastCompletedInstant.get().requestedTime)
+      writeClient.close()
       // validate the expression index
       checkAnswer(metadataSql)(
         // the last commit is rolledback so no records for that
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestTTLProcedure.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestTTLProcedure.scala
index ed324996cf4..7a8a42ff18e 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestTTLProcedure.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/procedure/TestTTLProcedure.scala
@@ -75,6 +75,7 @@ class TestTTLProcedure extends HoodieSparkProcedureTestBase with SparkDatasetMix
           Seq(partitionPath0),
           Seq(partitionPath1)
         )
+        client.close()
       }
       }
     }
diff --git a/hudi-sync/hudi-hive-sync/src/test/java/org/apache/hudi/hive/functional/HiveSyncFunctionalTestSuite.java b/hudi-sync/hudi-hive-sync/src/test/java/org/apache/hudi/hive/functional/HiveSyncFunctionalTestSuite.java
deleted file mode 100644
index b194e73b9fb..00000000000
--- a/hudi-sync/hudi-hive-sync/src/test/java/org/apache/hudi/hive/functional/HiveSyncFunctionalTestSuite.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hudi.hive.functional;
-
-import org.junit.platform.runner.JUnitPlatform;
-import org.junit.platform.suite.api.IncludeTags;
-import org.junit.platform.suite.api.SelectPackages;
-import org.junit.runner.RunWith;
-
-@RunWith(JUnitPlatform.class)
-@SelectPackages("org.apache.hudi.hive.functional")
-@IncludeTags("functional")
-public class HiveSyncFunctionalTestSuite {
-
-}
-
diff --git a/hudi-tests-common/pom.xml b/hudi-tests-common/pom.xml
index 3a1591597ff..f3e7a71416b 100644
--- a/hudi-tests-common/pom.xml
+++ b/hudi-tests-common/pom.xml
@@ -117,25 +117,11 @@
             <artifactId>junit-platform-runner</artifactId>
             <scope>compile</scope>
         </dependency>
-        <dependency>
-            <groupId>org.junit.platform</groupId>
-            <artifactId>junit-platform-suite-api</artifactId>
-            <scope>compile</scope>
-        </dependency>
         <dependency>
             <groupId>org.junit.platform</groupId>
             <artifactId>junit-platform-commons</artifactId>
             <scope>compile</scope>
         </dependency>
-        <!-- This is to support @RunWith(JUnitPlatform.class),
-             which should be replaced by @ExtendWith in JUnit 5
-             TODO remove this junit4 lib HUDI-2516 -->
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <version>4.13.2</version>
-            <scope>compile</scope>
-        </dependency>
         <dependency>
             <groupId>org.mockito</groupId>
             <artifactId>mockito-inline</artifactId>
diff --git a/hudi-utilities/src/test/java/org/apache/hudi/utilities/deltastreamer/TestHoodieDeltaStreamer.java b/hudi-utilities/src/test/java/org/apache/hudi/utilities/deltastreamer/TestHoodieDeltaStreamer.java
index 26caabc93da..cfbd4170e17 100644
--- a/hudi-utilities/src/test/java/org/apache/hudi/utilities/deltastreamer/TestHoodieDeltaStreamer.java
+++ b/hudi-utilities/src/test/java/org/apache/hudi/utilities/deltastreamer/TestHoodieDeltaStreamer.java
@@ -3394,11 +3394,12 @@ public class TestHoodieDeltaStreamer extends HoodieDeltaStreamerTestBase {
    * Return empty table.
    */
   public static class DropAllTransformer implements Transformer {
+    private static final Logger LOG = LoggerFactory.getLogger(DropAllTransformer.class);
 
     @Override
     public Dataset apply(JavaSparkContext jsc, SparkSession sparkSession, Dataset<Row> rowDataset,
                          TypedProperties properties) {
-      System.out.println("DropAllTransformer called !!");
+      LOG.info("DropAllTransformer called !!");
       return sparkSession.createDataFrame(jsc.emptyRDD(), rowDataset.schema());
     }
   }
diff --git a/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/UtilitiesFunctionalTestSuite.java b/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/UtilitiesFunctionalTestSuite.java
deleted file mode 100644
index 98bba5b4eee..00000000000
--- a/hudi-utilities/src/test/java/org/apache/hudi/utilities/functional/UtilitiesFunctionalTestSuite.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hudi.utilities.functional;
-
-import org.junit.platform.runner.JUnitPlatform;
-import org.junit.platform.suite.api.IncludeTags;
-import org.junit.platform.suite.api.SelectPackages;
-import org.junit.runner.RunWith;
-
-@RunWith(JUnitPlatform.class)
-@SelectPackages("org.apache.hudi.utilities.functional")
-@IncludeTags("functional")
-public class UtilitiesFunctionalTestSuite {
-
-}
diff --git a/hudi-utilities/src/test/java/org/apache/hudi/utilities/multitable/TestHoodieMultiTableServicesMain.java b/hudi-utilities/src/test/java/org/apache/hudi/utilities/multitable/TestHoodieMultiTableServicesMain.java
index 31c96029b66..6eefe96cd28 100644
--- a/hudi-utilities/src/test/java/org/apache/hudi/utilities/multitable/TestHoodieMultiTableServicesMain.java
+++ b/hudi-utilities/src/test/java/org/apache/hudi/utilities/multitable/TestHoodieMultiTableServicesMain.java
@@ -235,18 +235,19 @@ class TestHoodieMultiTableServicesMain extends HoodieCommonTestHarness implement
     // enable files and bloom_filters on the regular write client
     HoodieWriteConfig writeConfig = writeConfigBuilder.build();
     // do one upsert with synchronous metadata update
-    SparkRDDWriteClient writeClient = new SparkRDDWriteClient(context, writeConfig);
-    List<HoodieRecord> records;
-    WriteClientTestUtils.startCommitWithTime(writeClient, instant);
-    if (update) {
-      records = dataGen.generateUpdates(instant, 100);
-    } else {
-      records = dataGen.generateInserts(instant, 100);
+    try (SparkRDDWriteClient writeClient = new SparkRDDWriteClient(context, writeConfig)) {
+      List<HoodieRecord> records;
+      WriteClientTestUtils.startCommitWithTime(writeClient, instant);
+      if (update) {
+        records = dataGen.generateUpdates(instant, 100);
+      } else {
+        records = dataGen.generateInserts(instant, 100);
+      }
+      JavaRDD<WriteStatus> result = writeClient.upsert(jsc.parallelize(records, 8), instant);
+      List<WriteStatus> statuses = result.collect();
+      assertNoWriteErrors(statuses);
+      writeClient.commit(instant, jsc().parallelize(statuses));
     }
-    JavaRDD<WriteStatus> result = writeClient.upsert(jsc.parallelize(records, 8), instant);
-    List<WriteStatus> statuses = result.collect();
-    assertNoWriteErrors(statuses);
-    writeClient.commit(instant, jsc().parallelize(statuses));
   }
 
  private HoodieWriteConfig.Builder getWriteConfigBuilder(StoragePath basePath, String tableName) {
diff --git a/hudi-utilities/src/test/java/org/apache/hudi/utilities/testutils/UtilitiesTestBase.java b/hudi-utilities/src/test/java/org/apache/hudi/utilities/testutils/UtilitiesTestBase.java
index 5beb41a8695..5586426aa02 100644
--- a/hudi-utilities/src/test/java/org/apache/hudi/utilities/testutils/UtilitiesTestBase.java
+++ b/hudi-utilities/src/test/java/org/apache/hudi/utilities/testutils/UtilitiesTestBase.java
@@ -166,7 +166,7 @@ public class UtilitiesTestBase {
       clearHiveDb(basePath + "/dummy" + System.currentTimeMillis());
     }
 
-    jsc = UtilHelpers.buildSparkContext(UtilitiesTestBase.class.getName() + "-hoodie", "local[4]", sparkConf());
+    jsc = UtilHelpers.buildSparkContext(UtilitiesTestBase.class.getName() + "-hoodie", "local[4,1]", sparkConf());
     context = new HoodieSparkEngineContext(jsc);
     sqlContext = new SQLContext(jsc);
     sparkSession = SparkSession.builder().config(jsc.getConf()).getOrCreate();
diff --git a/pom.xml b/pom.xml
index 93b98362080..aeb85952c2b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -111,9 +111,9 @@
     <glassfish.version>2.17</glassfish.version>
     <glassfish.el.version>3.0.1-b12</glassfish.el.version>
     <parquet.version>1.10.1</parquet.version>
-    <junit.jupiter.version>5.8.2</junit.jupiter.version>
-    <junit.vintage.version>5.8.2</junit.vintage.version>
-    <junit.platform.version>1.8.2</junit.platform.version>
+    <junit.jupiter.version>5.13.0</junit.jupiter.version>
+    <junit.vintage.version>5.13.0</junit.vintage.version>
+    <junit.platform.version>1.13.0</junit.platform.version>
     <mockito.jupiter.version>3.12.4</mockito.jupiter.version>
     <log4j2.version>2.17.2</log4j2.version>
     <slf4j.version>1.7.36</slf4j.version>
@@ -1599,6 +1599,12 @@
         <version>${junit.jupiter.version}</version>
         <scope>test</scope>
       </dependency>
+      <dependency>
+        <groupId>org.junit.jupiter</groupId>
+        <artifactId>junit-jupiter</artifactId>
+        <version>${junit.jupiter.version}</version>
+        <scope>test</scope>
+      </dependency>
       <dependency>
         <groupId>org.junit.jupiter</groupId>
         <artifactId>junit-jupiter-engine</artifactId>
@@ -1647,6 +1653,13 @@
         <version>${junit.platform.version}</version>
         <scope>test</scope>
       </dependency>
+      <!-- Junit 4 -->
+      <dependency>
+        <groupId>junit</groupId>
+        <artifactId>junit</artifactId>
+        <version>4.13.2</version>
+        <scope>test</scope>
+      </dependency>
 
       <!-- Kryo -->
       <dependency>
@@ -1905,9 +1918,6 @@
               <forkedProcessExitTimeoutInSeconds>120</forkedProcessExitTimeoutInSeconds>
               <excludedGroups>functional,functional-b</excludedGroups>
               <excludes>
-                <exclude>**/*FunctionalTestSuite.java</exclude>
-                <exclude>**/*FunctionalTestSuiteA.java</exclude>
-                <exclude>**/*FunctionalTestSuiteB.java</exclude>
                 <exclude>**/IT*.java</exclude>
                 <exclude>**/testsuite/**/Test*.java</exclude>
               </excludes>
@@ -1953,21 +1963,11 @@
             <groupId>org.apache.maven.plugins</groupId>
             <artifactId>maven-surefire-plugin</artifactId>
             <version>${maven-surefire-plugin.version}</version>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.maven.surefire</groupId>
-                <artifactId>surefire-junit47</artifactId>
-                <version>${maven-surefire-plugin.version}</version>
-              </dependency>
-            </dependencies>
             <configuration combine.self="append">
               <skip>${skipFTs}</skip>
               <forkCount>1</forkCount>
               <reuseForks>true</reuseForks>
-              <includes>
-                <include>**/*FunctionalTestSuite.java</include>
-                <include>**/*FunctionalTestSuiteA.java</include>
-              </includes>
+              <groups>functional</groups>
             </configuration>
           </plugin>
           <plugin>
@@ -2010,20 +2010,11 @@
             <groupId>org.apache.maven.plugins</groupId>
             <artifactId>maven-surefire-plugin</artifactId>
             <version>${maven-surefire-plugin.version}</version>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.maven.surefire</groupId>
-                <artifactId>surefire-junit47</artifactId>
-                <version>${maven-surefire-plugin.version}</version>
-              </dependency>
-            </dependencies>
             <configuration combine.self="append">
               <skip>${skipFTs}</skip>
               <forkCount>1</forkCount>
               <reuseForks>true</reuseForks>
-              <includes>
-                <include>**/*FunctionalTestSuiteB.java</include>
-              </includes>
+              <groups>functional-b</groups>
             </configuration>
           </plugin>
           <plugin>

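(For local verification, assuming the standard Surefire "groups" property and
the skipFTs flag wired in the profiles above, a tag-driven functional run
reduces to something like: mvn test -DskipFTs=false -Dgroups=functional)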