This is an automated email from the ASF dual-hosted git repository.

ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 6831481fd7a2 [SPARK-55023][CORE][SQL][TESTS] Remove unnecessary `BeforeAndAfterAll` and `BeforeAndAfterEach`
6831481fd7a2 is described below

commit 6831481fd7a2d30dfa16b4b70c8e6296b4deeb8c
Author: Ruifeng Zheng <[email protected]>
AuthorDate: Wed Jan 14 18:56:39 2026 +0800

    [SPARK-55023][CORE][SQL][TESTS] Remove unnecessary `BeforeAndAfterAll` and `BeforeAndAfterEach`
    
    ### What changes were proposed in this pull request?
    Remove unnecessary `BeforeAndAfterAll` and `BeforeAndAfterEach` mixins from test suites.
    
    ### Why are the changes needed?
    `SparkFunSuite` and its subclasses already extend `BeforeAndAfterAll` and `BeforeAndAfterEach`, so mixing these traits in again is redundant.

    Code cleanup ahead of further refactoring.
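
    For illustration, a minimal sketch of why the explicit mixin is a no-op (hypothetical suite names; `SparkFunSuite`'s parents simplified here):

    ```scala
    import org.scalatest.BeforeAndAfterAll
    import org.scalatest.funsuite.AnyFunSuite

    // SparkFunSuite already mixes in the lifecycle trait, so every
    // subclass can override beforeAll/afterAll without re-mixing it.
    abstract class SparkFunSuite extends AnyFunSuite with BeforeAndAfterAll

    // Redundant: BeforeAndAfterAll is already in the linearization.
    class MySuite extends SparkFunSuite with BeforeAndAfterAll

    // Equivalent, and what this patch leaves behind:
    class MySuiteCleaned extends SparkFunSuite {
      override def beforeAll(): Unit = {
        super.beforeAll()
        // per-suite setup goes here
      }
    }
    ```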
    
    ### Does this PR introduce _any_ user-facing change?
    No, test-only.
    
    ### How was this patch tested?
    CI
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #53787 from zhengruifeng/del_before_and_after.
    
    Authored-by: Ruifeng Zheng <[email protected]>
    Signed-off-by: Ruifeng Zheng <[email protected]>
---
 .../test/scala/org/apache/spark/ExternalShuffleServiceSuite.scala  | 3 +--
 core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala  | 3 +--
 core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala       | 4 ++--
 .../test/scala/org/apache/spark/ShuffleOldFetchProtocolSuite.scala | 4 +---
 core/src/test/scala/org/apache/spark/SortShuffleSuite.scala        | 3 +--
 .../scala/org/apache/spark/deploy/DecommissionWorkerSuite.scala    | 4 +---
 .../test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala    | 3 ---
 .../test/scala/org/apache/spark/deploy/SparkPipelinesSuite.scala   | 4 +---
 core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala | 2 --
 .../org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala | 3 +--
 .../test/scala/org/apache/spark/deploy/client/AppClientSuite.scala | 2 --
 .../scala/org/apache/spark/executor/ExecutorClassLoaderSuite.scala | 2 --
 .../org/apache/spark/io/ChunkedByteBufferFileRegionSuite.scala     | 4 +---
 .../spark/network/netty/NettyBlockTransferServiceSuite.scala       | 2 --
 .../spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala     | 2 --
 .../src/test/scala/org/apache/spark/storage/MemoryStoreSuite.scala | 1 -
 .../org/apache/spark/storage/PartiallySerializedBlockSuite.scala   | 3 +--
 .../test/scala/org/apache/spark/util/ClosureCleanerSuite2.scala    | 4 +---
 core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala | 3 +--
 .../scala/org/apache/spark/sql/catalyst/SchemaPruningTest.scala    | 4 +---
 .../expressions/codegen/BufferHolderSparkSubmitSuite.scala         | 3 +--
 .../test/scala/org/apache/spark/sql/PlanGenerationTestSuite.scala  | 7 +------
 .../test/scala/org/apache/spark/sql/application/ReplE2ESuite.scala | 3 +--
 .../scala/org/apache/spark/sql/connect/ClientDatasetSuite.scala    | 3 +--
 .../scala/org/apache/spark/sql/connect/SQLImplicitsTestSuite.scala | 3 +--
 .../scala/org/apache/spark/sql/connect/client/ArtifactSuite.scala  | 3 +--
 .../spark/sql/connect/client/SparkConnectClientRetriesSuite.scala  | 6 +-----
 .../apache/spark/sql/connect/client/SparkConnectClientSuite.scala  | 3 +--
 .../apache/spark/sql/connect/client/arrow/ArrowEncoderSuite.scala  | 3 +--
 .../sql/connect/streaming/TransformWithStateConnectSuite.scala     | 7 +------
 .../scala/org/apache/spark/sql/connect/test/ConnectFunSuite.scala  | 6 +++++-
 .../spark/sql/connect/service/SparkConnectCloneSessionSuite.scala  | 4 +---
 .../sql/connect/service/SparkConnectSessionManagerSuite.scala      | 3 +--
 .../org/apache/spark/sql/DataFrameSessionWindowingSuite.scala      | 5 +----
 .../apache/spark/sql/execution/BaseScriptTransformationSuite.scala | 4 +---
 .../spark/sql/execution/WholeStageCodegenSparkSubmitSuite.scala    | 3 +--
 .../datasources/jdbc/connection/ConnectionProviderSuiteBase.scala  | 4 +---
 .../spark/sql/execution/datasources/orc/OrcSourceSuite.scala       | 3 +--
 .../org/apache/spark/sql/execution/datasources/orc/OrcTest.scala   | 4 +---
 .../spark/sql/execution/datasources/xml/XmlPartitioningSuite.scala | 3 +--
 .../datasources/xml/parsers/StaxXmlParserUtilsSuite.scala          | 4 +---
 .../execution/python/streaming/BaseStreamingArrowWriterSuite.scala | 3 +--
 .../src/test/scala/org/apache/spark/sql/streaming/StreamTest.scala | 4 ++--
 .../spark/sql/streaming/continuous/EpochCoordinatorSuite.scala     | 4 +---
 .../spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala      | 3 +--
 .../org/apache/spark/sql/hive/thriftserver/UISeleniumSuite.scala   | 3 +--
 .../scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala    | 3 +--
 .../test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala  | 4 +---
 .../scala/org/apache/spark/sql/hive/HiveSchemaInferenceSuite.scala | 4 +---
 .../scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala     | 2 --
 .../src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala | 5 +----
 .../org/apache/spark/sql/hive/PartitionedTablePerfStatsSuite.scala | 4 +---
 sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala   | 5 +----
 .../apache/spark/sql/hive/client/HivePartitionFilteringSuite.scala | 3 +--
 .../org/apache/spark/sql/hive/execution/HiveComparisonTest.scala   | 4 +---
 .../scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala   | 3 +--
 .../scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala | 4 +---
 .../scala/org/apache/spark/sql/hive/test/TestHiveSingleton.scala   | 4 +---
 .../spark/sql/pipelines/logging/ConstructPipelineEventSuite.scala  | 4 +---
 59 files changed, 57 insertions(+), 153 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/ExternalShuffleServiceSuite.scala b/core/src/test/scala/org/apache/spark/ExternalShuffleServiceSuite.scala
index e9a0c405b0d9..d86fbc850d1f 100644
--- a/core/src/test/scala/org/apache/spark/ExternalShuffleServiceSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ExternalShuffleServiceSuite.scala
@@ -26,7 +26,6 @@ import scala.collection
 import scala.concurrent.Promise
 import scala.concurrent.duration.Duration
 
-import org.scalatest.BeforeAndAfterAll
 import org.scalatest.concurrent.Eventually
 import org.scalatest.matchers.should.Matchers._
 import org.scalatest.time.SpanSugar._
@@ -46,7 +45,7 @@ import org.apache.spark.util.io.ChunkedByteBuffer
  * set up in `ExternalBlockHandler`, such as changing the format of shuffle files or how
  * we hash files into folders.
  */
-class ExternalShuffleServiceSuite extends ShuffleSuite with BeforeAndAfterAll with Eventually {
+class ExternalShuffleServiceSuite extends ShuffleSuite with Eventually {
   var server: TransportServer = _
   var transportContext: TransportContext = _
   var rpcHandler: ExternalBlockHandler = _
diff --git a/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala b/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
index c0a8f2ecdd49..817dbab2c22b 100644
--- a/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/HeartbeatReceiverSuite.scala
@@ -25,7 +25,7 @@ import scala.concurrent.duration._
 
 import org.mockito.ArgumentMatchers.{any, eq => meq}
 import org.mockito.Mockito.{mock, spy, verify, when}
-import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
+import org.scalatest.PrivateMethodTester
 import org.scalatest.concurrent.Eventually._
 
 import org.apache.spark.executor.{ExecutorMetrics, TaskMetrics}
@@ -44,7 +44,6 @@ import org.apache.spark.util.{ManualClock, ThreadUtils}
  */
 class HeartbeatReceiverSuite
   extends SparkFunSuite
-  with BeforeAndAfterEach
   with PrivateMethodTester
   with LocalSparkContext {
 
diff --git a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
index d7f9e248dfe5..de47c7936035 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
@@ -18,12 +18,12 @@
 package org.apache.spark
 
 import org.scalactic.source.Position
-import org.scalatest.{BeforeAndAfterAll, Tag}
+import org.scalatest.Tag
 
 import org.apache.spark.network.util.IOMode
 import org.apache.spark.util.Utils
 
-abstract class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {
+abstract class ShuffleNettySuite extends ShuffleSuite {
 
   // This test suite should run all tests in ShuffleSuite with Netty shuffle mode.
 
diff --git a/core/src/test/scala/org/apache/spark/ShuffleOldFetchProtocolSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleOldFetchProtocolSuite.scala
index a878593ba601..92141c3cf34a 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleOldFetchProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleOldFetchProtocolSuite.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark
 
-import org.scalatest.BeforeAndAfterAll
-
-class ShuffleOldFetchProtocolSuite extends ShuffleSuite with BeforeAndAfterAll {
+class ShuffleOldFetchProtocolSuite extends ShuffleSuite {
 
   // This test suite should run all tests by setting spark.shuffle.useOldFetchProtocol=true.
   override def beforeAll(): Unit = {
diff --git a/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala b/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala
index e11194eb9013..03d9d5e2ce62 100644
--- a/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SortShuffleSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark
 
 import scala.jdk.CollectionConverters._
 
-import org.scalatest.BeforeAndAfterAll
 import org.scalatest.matchers.should.Matchers._
 
 import org.apache.spark.internal.config.{SHUFFLE_CHECKSUM_ALGORITHM, SHUFFLE_MANAGER}
@@ -28,7 +27,7 @@ import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
 import org.apache.spark.shuffle.sort.SortShuffleManager
 import org.apache.spark.util.Utils
 
-class SortShuffleSuite extends ShuffleSuite with BeforeAndAfterAll {
+class SortShuffleSuite extends ShuffleSuite {
 
   // This test suite should run all tests in ShuffleSuite with sort-based shuffle.
   override def beforeAll(): Unit = {
diff --git a/core/src/test/scala/org/apache/spark/deploy/DecommissionWorkerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/DecommissionWorkerSuite.scala
index 20993df718a3..3c3ea40ea442 100644
--- a/core/src/test/scala/org/apache/spark/deploy/DecommissionWorkerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/DecommissionWorkerSuite.scala
@@ -24,7 +24,6 @@ import scala.collection.mutable
 import scala.concurrent.duration._
 import scala.jdk.CollectionConverters._
 
-import org.scalatest.BeforeAndAfterEach
 import org.scalatest.concurrent.Eventually._
 
 import org.apache.spark._
@@ -44,8 +43,7 @@ import org.apache.spark.util.Utils
 class DecommissionWorkerSuite
   extends SparkFunSuite
     with Logging
-    with LocalSparkContext
-    with BeforeAndAfterEach {
+    with LocalSparkContext {
 
   private var masterAndWorkerConf: SparkConf = null
   private var masterAndWorkerSecurityManager: SecurityManager = null
diff --git a/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala
index 511889e73270..84e6b6971c27 100644
--- a/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/RPackageUtilsSuite.scala
@@ -26,8 +26,6 @@ import java.util.zip.ZipFile
 import scala.collection.mutable.ArrayBuffer
 import scala.jdk.CollectionConverters._
 
-import org.scalatest.BeforeAndAfterEach
-
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.api.r.RUtils
 import org.apache.spark.util.{IvyTestUtils, ResetSystemProperties, Utils}
@@ -35,7 +33,6 @@ import org.apache.spark.util.MavenUtils.MavenCoordinate
 
 class RPackageUtilsSuite
   extends SparkFunSuite
-  with BeforeAndAfterEach
   with ResetSystemProperties {
 
   private val main = MavenCoordinate("a", "b", "c")
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkPipelinesSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkPipelinesSuite.scala
index 60e279ba2ddc..0fcf116b04a2 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkPipelinesSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkPipelinesSuite.scala
@@ -17,11 +17,9 @@
 
 package org.apache.spark.deploy
 
-import org.scalatest.BeforeAndAfterEach
-
 import org.apache.spark.SparkUserAppException
 
-class SparkPipelinesSuite extends SparkSubmitTestUtils with BeforeAndAfterEach {
+class SparkPipelinesSuite extends SparkSubmitTestUtils {
   test("only spark submit args") {
     val args = Array(
       "--remote",
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index eda86fdeb035..9d05c4385ac4 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -27,7 +27,6 @@ import scala.io.{Codec, Source}
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{FileStatus, FSDataInputStream, Path}
-import org.scalatest.BeforeAndAfterEach
 import org.scalatest.matchers.must.Matchers
 import org.scalatest.matchers.should.Matchers._
 
@@ -104,7 +103,6 @@ trait TestPrematureExit {
 class SparkSubmitSuite
   extends SparkSubmitTestUtils
   with Matchers
-  with BeforeAndAfterEach
   with ResetSystemProperties
   with TestPrematureExit {
 
diff --git a/core/src/test/scala/org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala b/core/src/test/scala/org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala
index 487a90f157a9..90ef0aa510c2 100644
--- a/core/src/test/scala/org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/StandaloneDynamicAllocationSuite.scala
@@ -22,7 +22,7 @@ import scala.concurrent.duration._
 
 import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{mock, when}
-import org.scalatest.{BeforeAndAfterAll, PrivateMethodTester}
+import org.scalatest.PrivateMethodTester
 import org.scalatest.concurrent.Eventually._
 
 import org.apache.spark._
@@ -43,7 +43,6 @@ import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.{Launched
 class StandaloneDynamicAllocationSuite
   extends SparkFunSuite
   with LocalSparkContext
-  with BeforeAndAfterAll
   with PrivateMethodTester {
 
   private val numWorkers = 2
diff --git a/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala b/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala
index eb6b4b23c61c..877aee47cd65 100644
--- a/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/client/AppClientSuite.scala
@@ -22,7 +22,6 @@ import java.util.concurrent.{ConcurrentHashMap, ConcurrentLinkedQueue}
 
 import scala.concurrent.duration._
 
-import org.scalatest.BeforeAndAfterAll
 import org.scalatest.concurrent.{Eventually, ScalaFutures}
 
 import org.apache.spark._
@@ -43,7 +42,6 @@ import org.apache.spark.util.Utils
 class AppClientSuite
     extends SparkFunSuite
     with LocalSparkContext
-    with BeforeAndAfterAll
     with Eventually
     with ScalaFutures {
   private val numWorkers = 2
diff --git a/core/src/test/scala/org/apache/spark/executor/ExecutorClassLoaderSuite.scala b/core/src/test/scala/org/apache/spark/executor/ExecutorClassLoaderSuite.scala
index fbb52971960c..09d7e2d690bc 100644
--- a/core/src/test/scala/org/apache/spark/executor/ExecutorClassLoaderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/executor/ExecutorClassLoaderSuite.scala
@@ -33,7 +33,6 @@ import org.mockito.ArgumentMatchers.{any, anyString}
 import org.mockito.Mockito._
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
-import org.scalatest.BeforeAndAfterAll
 import org.scalatestplus.mockito.MockitoSugar
 
 import org.apache.spark._
@@ -44,7 +43,6 @@ import org.apache.spark.util.Utils
 
 class ExecutorClassLoaderSuite
   extends SparkFunSuite
-  with BeforeAndAfterAll
   with MockitoSugar
   with Logging {
 
diff --git a/core/src/test/scala/org/apache/spark/io/ChunkedByteBufferFileRegionSuite.scala b/core/src/test/scala/org/apache/spark/io/ChunkedByteBufferFileRegionSuite.scala
index 551c0f1a7324..4ea2aa92094b 100644
--- a/core/src/test/scala/org/apache/spark/io/ChunkedByteBufferFileRegionSuite.scala
+++ b/core/src/test/scala/org/apache/spark/io/ChunkedByteBufferFileRegionSuite.scala
@@ -22,15 +22,13 @@ import java.nio.channels.WritableByteChannel
 import scala.util.Random
 
 import org.mockito.Mockito.when
-import org.scalatest.BeforeAndAfterEach
 import org.scalatestplus.mockito.MockitoSugar
 
 import org.apache.spark.{SparkConf, SparkEnv, SparkFunSuite}
 import org.apache.spark.internal.config
 import org.apache.spark.util.io.ChunkedByteBuffer
 
-class ChunkedByteBufferFileRegionSuite extends SparkFunSuite with MockitoSugar
-    with BeforeAndAfterEach {
+class ChunkedByteBufferFileRegionSuite extends SparkFunSuite with MockitoSugar {
 
   override protected def beforeEach(): Unit = {
     super.beforeEach()
diff --git a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala
index 62105f1d5146..447f2ed3f93b 100644
--- a/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala
+++ b/core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala
@@ -25,7 +25,6 @@ import scala.util.Random
 
 import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{mock, when}
-import org.scalatest.BeforeAndAfterEach
 import org.scalatest.matchers.must.Matchers
 import org.scalatest.matchers.should.Matchers._
 
@@ -38,7 +37,6 @@ import org.apache.spark.serializer.{JavaSerializer, SerializerManager}
 
 class NettyBlockTransferServiceSuite
   extends SparkFunSuite
-  with BeforeAndAfterEach
   with Matchers {
 
   private var service0: NettyBlockTransferService = _
diff --git a/core/src/test/scala/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala b/core/src/test/scala/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala
index c908c06b399d..c9b951cf0369 100644
--- a/core/src/test/scala/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/shuffle/sort/BypassMergeSortShuffleWriterSuite.scala
@@ -28,7 +28,6 @@ import org.mockito.{Mock, MockitoAnnotations}
 import org.mockito.Answers.RETURNS_SMART_NULLS
 import org.mockito.ArgumentMatchers.{any, anyInt, anyLong}
 import org.mockito.Mockito._
-import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark._
 import org.apache.spark.executor.{ShuffleWriteMetrics, TaskMetrics}
@@ -44,7 +43,6 @@ import org.apache.spark.util.Utils
 
 class BypassMergeSortShuffleWriterSuite
   extends SparkFunSuite
-    with BeforeAndAfterEach
     with ShuffleChecksumTestHelper {
 
   @Mock(answer = RETURNS_SMART_NULLS) private var blockManager: BlockManager = _
diff --git a/core/src/test/scala/org/apache/spark/storage/MemoryStoreSuite.scala b/core/src/test/scala/org/apache/spark/storage/MemoryStoreSuite.scala
index ab8c465074f1..727f3429cf57 100644
--- a/core/src/test/scala/org/apache/spark/storage/MemoryStoreSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/MemoryStoreSuite.scala
@@ -38,7 +38,6 @@ case class OffHeapValue(override val estimatedSize: Long) extends KnownSizeEstim
 class MemoryStoreSuite
   extends SparkFunSuite
   with PrivateMethodTester
-  with BeforeAndAfterEach
   with ResetSystemProperties {
 
   val conf: SparkConf = new SparkConf(false)
diff --git a/core/src/test/scala/org/apache/spark/storage/PartiallySerializedBlockSuite.scala b/core/src/test/scala/org/apache/spark/storage/PartiallySerializedBlockSuite.scala
index 5582524ffeef..3bf04fa19e55 100644
--- a/core/src/test/scala/org/apache/spark/storage/PartiallySerializedBlockSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/PartiallySerializedBlockSuite.scala
@@ -24,7 +24,7 @@ import scala.reflect.ClassTag
 import org.mockito.Mockito
 import org.mockito.Mockito.atLeastOnce
 import org.mockito.invocation.InvocationOnMock
-import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
+import org.scalatest.PrivateMethodTester
 
 import org.apache.spark.{SparkConf, SparkException, SparkFunSuite, TaskContext, TaskContextImpl}
 import org.apache.spark.memory.MemoryMode
@@ -35,7 +35,6 @@ import org.apache.spark.util.io.{ChunkedByteBuffer, ChunkedByteBufferOutputStrea
 
 class PartiallySerializedBlockSuite
     extends SparkFunSuite
-    with BeforeAndAfterEach
     with PrivateMethodTester {
 
   private val blockId = new TestBlockId("test")
diff --git a/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite2.scala b/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite2.scala
index 88b791783836..85a89aaede95 100644
--- a/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite2.scala
+++ b/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite2.scala
@@ -19,8 +19,6 @@ package org.apache.spark.util
 
 import java.io.NotSerializableException
 
-import org.scalatest.BeforeAndAfterAll
-
 import org.apache.spark.{SparkContext, SparkException, SparkFunSuite}
 import org.apache.spark.serializer.SerializerInstance
 
@@ -28,7 +26,7 @@ import org.apache.spark.serializer.SerializerInstance
  * Another test suite for the closure cleaner that is finer-grained.
  * For tests involving end-to-end Spark jobs, see {{ClosureCleanerSuite}}.
  */
-class ClosureCleanerSuite2 extends SparkFunSuite with BeforeAndAfterAll {
+class ClosureCleanerSuite2 extends SparkFunSuite {
 
   // Start a SparkContext so that the closure serializer is accessible
   // We do not actually use this explicitly otherwise
diff --git a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
index a7908d89288a..8f414305f3ee 100644
--- a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.util
 
 import scala.collection.mutable.ArrayBuffer
 
-import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
+import org.scalatest.PrivateMethodTester
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.internal.config.Tests.TEST_USE_COMPRESSED_OOPS_KEY
@@ -71,7 +71,6 @@ class DummyClass8 extends KnownSizeEstimation {
 
 class SizeEstimatorSuite
   extends SparkFunSuite
-  with BeforeAndAfterEach
   with PrivateMethodTester
   with ResetSystemProperties {
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SchemaPruningTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SchemaPruningTest.scala
index 68e76fc013c1..afa1cca94bcf 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SchemaPruningTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SchemaPruningTest.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql.catalyst
 
-import org.scalatest.BeforeAndAfterAll
-
 import org.apache.spark.sql.catalyst.plans.PlanTest
 import org.apache.spark.sql.internal.SQLConf.NESTED_SCHEMA_PRUNING_ENABLED
 
@@ -26,7 +24,7 @@ import org.apache.spark.sql.internal.SQLConf.NESTED_SCHEMA_PRUNING_ENABLED
  * A PlanTest that ensures that all tests in this suite are run with nested schema pruning enabled.
  * Remove this trait once the default value of SQLConf.NESTED_SCHEMA_PRUNING_ENABLED is set to true.
  */
-private[sql] trait SchemaPruningTest extends PlanTest with BeforeAndAfterAll {
+private[sql] trait SchemaPruningTest extends PlanTest {
   private var originalConfSchemaPruningEnabled = false
 
   override protected def beforeAll(): Unit = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/codegen/BufferHolderSparkSubmitSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/codegen/BufferHolderSparkSubmitSuite.scala
index b0ed1ecabb8d..24977cc1267b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/codegen/BufferHolderSparkSubmitSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/codegen/BufferHolderSparkSubmitSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.catalyst.expressions.codegen
 
-import org.scalatest.{Assertions, BeforeAndAfterEach}
+import org.scalatest.Assertions
 import org.scalatest.concurrent.Eventually.{eventually, interval, timeout}
 import org.scalatest.matchers.must.Matchers
 import org.scalatest.time.SpanSugar._
@@ -33,7 +33,6 @@ import org.apache.spark.util.ResetSystemProperties
 class BufferHolderSparkSubmitSuite
   extends SparkSubmitTestUtils
     with Matchers
-    with BeforeAndAfterEach
     with ResetSystemProperties {
 
   test("SPARK-22222: Buffer holder should be able to allocate memory larger 
than 1GB") {
diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/PlanGenerationTestSuite.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/PlanGenerationTestSuite.scala
index b5eabb82b88d..a21de2dbfaa6 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/PlanGenerationTestSuite.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/PlanGenerationTestSuite.scala
@@ -28,7 +28,6 @@ import com.google.protobuf
 import com.google.protobuf.util.JsonFormat
 import com.google.protobuf.util.JsonFormat.TypeRegistry
 import io.grpc.inprocess.InProcessChannelBuilder
-import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
 
 import org.apache.spark.connect.proto
 import org.apache.spark.connect.proto.StorageLevel
@@ -77,11 +76,7 @@ import org.apache.spark.util.SparkFileUtils
  * `sql/connect/server` module
  */
 // scalastyle:on
-class PlanGenerationTestSuite
-    extends ConnectFunSuite
-    with BeforeAndAfterAll
-    with BeforeAndAfterEach
-    with Logging {
+class PlanGenerationTestSuite extends ConnectFunSuite with Logging {
 
   // Borrowed from SparkFunSuite
   private val regenerateGoldenFiles: Boolean = System.getenv("SPARK_GENERATE_GOLDEN_FILES") == "1"
diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/application/ReplE2ESuite.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/application/ReplE2ESuite.scala
index c07b624e8f8f..30904fd0a9f7 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/application/ReplE2ESuite.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/application/ReplE2ESuite.scala
@@ -23,7 +23,6 @@ import java.util.concurrent.{Executors, Semaphore, TimeUnit}
 import scala.util.Properties
 
 import org.apache.commons.io.output.ByteArrayOutputStream
-import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.sql.connect.test.{ConnectFunSuite, IntegrationTestUtils, RemoteSparkSession}
 import org.apache.spark.tags.AmmoniteTest
@@ -31,7 +30,7 @@ import org.apache.spark.util.IvyTestUtils
 import org.apache.spark.util.MavenUtils.MavenCoordinate
 
 @AmmoniteTest
-class ReplE2ESuite extends ConnectFunSuite with RemoteSparkSession with BeforeAndAfterEach {
+class ReplE2ESuite extends ConnectFunSuite with RemoteSparkSession {
 
   private val executorService = Executors.newSingleThreadExecutor()
   private val TIMEOUT_SECONDS = 30
diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/ClientDatasetSuite.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/ClientDatasetSuite.scala
index 7e6cebfd972d..2eed9f187456 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/ClientDatasetSuite.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/ClientDatasetSuite.scala
@@ -22,7 +22,6 @@ import java.util.concurrent.atomic.AtomicLong
 
 import io.grpc.Server
 import io.grpc.inprocess.{InProcessChannelBuilder, InProcessServerBuilder}
-import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.connect.proto
 import org.apache.spark.sql.Column
@@ -35,7 +34,7 @@ import org.apache.spark.util.SparkSerDeUtils
 // - sample fraction: simple.sample(0.1)
 // - sample withReplacement_fraction: simple.sample(withReplacement = true, 0.11)
 // Add tests for exceptions thrown
-class ClientDatasetSuite extends ConnectFunSuite with BeforeAndAfterEach {
+class ClientDatasetSuite extends ConnectFunSuite {
 
   private var server: Server = _
   private var service: DummySparkConnectService = _
diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/SQLImplicitsTestSuite.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/SQLImplicitsTestSuite.scala
index 547d5ca7804a..fbab0dbde6a4 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/SQLImplicitsTestSuite.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/SQLImplicitsTestSuite.scala
@@ -23,7 +23,6 @@ import java.util.concurrent.atomic.AtomicLong
 
 import io.grpc.inprocess.InProcessChannelBuilder
 import org.apache.arrow.memory.RootAllocator
-import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.sql.{Column, Encoder, SaveMode}
 import org.apache.spark.sql.catalyst.encoders.AgnosticEncoders.agnosticEncoderFor
@@ -35,7 +34,7 @@ import org.apache.spark.util.SparkSystemUtils
 /**
  * Test suite for SQL implicits.
  */
-class SQLImplicitsTestSuite extends ConnectFunSuite with BeforeAndAfterAll {
+class SQLImplicitsTestSuite extends ConnectFunSuite {
   private var session: SparkSession = _
 
   override protected def beforeAll(): Unit = {
diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/ArtifactSuite.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/ArtifactSuite.scala
index dcf3b91fece2..e17cda3a5ba2 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/ArtifactSuite.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/ArtifactSuite.scala
@@ -27,7 +27,6 @@ import com.google.protobuf.ByteString
 import io.grpc.{ManagedChannel, Server}
 import io.grpc.inprocess.{InProcessChannelBuilder, InProcessServerBuilder}
 import org.apache.commons.codec.digest.DigestUtils.sha256Hex
-import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.connect.proto.AddArtifactsRequest
 import org.apache.spark.sql.Artifact
@@ -36,7 +35,7 @@ import org.apache.spark.sql.connect.test.ConnectFunSuite
 import org.apache.spark.util.IvyTestUtils
 import org.apache.spark.util.MavenUtils.MavenCoordinate
 
-class ArtifactSuite extends ConnectFunSuite with BeforeAndAfterEach {
+class ArtifactSuite extends ConnectFunSuite {
 
   private var client: SparkConnectClient = _
   private var service: DummySparkConnectService = _
diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientRetriesSuite.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientRetriesSuite.scala
index c0738d7de325..7ea01e34ec88 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientRetriesSuite.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientRetriesSuite.scala
@@ -22,15 +22,11 @@ import com.google.protobuf.{Any, Duration}
 import com.google.rpc
 import io.grpc.{Status, StatusRuntimeException}
 import io.grpc.protobuf.StatusProto
-import org.scalatest.BeforeAndAfterEach
 import org.scalatest.concurrent.Eventually
 
 import org.apache.spark.sql.connect.test.ConnectFunSuite
 
-class SparkConnectClientRetriesSuite
-    extends ConnectFunSuite
-    with BeforeAndAfterEach
-    with Eventually {
+class SparkConnectClientRetriesSuite extends ConnectFunSuite with Eventually {
 
   private class DummyFn(e: => Throwable, numFails: Int = 3) {
     var counter = 0
diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala
index 743112c6dd4d..20d1187d2a8f 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala
@@ -25,7 +25,6 @@ import scala.jdk.CollectionConverters._
 import io.grpc.{CallOptions, Channel, ClientCall, ClientInterceptor, MethodDescriptor, Server, Status, StatusRuntimeException}
 import io.grpc.netty.NettyServerBuilder
 import io.grpc.stub.StreamObserver
-import org.scalatest.BeforeAndAfterEach
 import org.scalatest.concurrent.Eventually
 import org.scalatest.concurrent.Futures.timeout
 import org.scalatest.time.SpanSugar._
@@ -37,7 +36,7 @@ import org.apache.spark.sql.connect.SparkSession
 import org.apache.spark.sql.connect.common.config.ConnectCommon
 import org.apache.spark.sql.connect.test.ConnectFunSuite
 
-class SparkConnectClientSuite extends ConnectFunSuite with BeforeAndAfterEach {
+class SparkConnectClientSuite extends ConnectFunSuite {
 
   private var client: SparkConnectClient = _
   private var service: DummySparkConnectService = _
diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/arrow/ArrowEncoderSuite.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/arrow/ArrowEncoderSuite.scala
index 52a503d62601..5cd7a3a2acde 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/arrow/ArrowEncoderSuite.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/arrow/ArrowEncoderSuite.scala
@@ -28,7 +28,6 @@ import scala.reflect.classTag
 
 import org.apache.arrow.memory.{BufferAllocator, RootAllocator}
 import org.apache.arrow.vector.VarBinaryVector
-import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.{SparkRuntimeException, SparkUnsupportedOperationException}
 import org.apache.spark.sql.{AnalysisException, Encoders, Row}
@@ -51,7 +50,7 @@ import org.apache.spark.util.{MaybeNull, SparkStringUtils}
 /**
  * Tests for encoding external data to and from arrow.
  */
-class ArrowEncoderSuite extends ConnectFunSuite with BeforeAndAfterAll {
+class ArrowEncoderSuite extends ConnectFunSuite {
   private val allocator = new RootAllocator()
 
   private def newAllocator(name: String): BufferAllocator = {
diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/streaming/TransformWithStateConnectSuite.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/streaming/TransformWithStateConnectSuite.scala
index 359486ae8e65..f054784b5c0b 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/streaming/TransformWithStateConnectSuite.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/streaming/TransformWithStateConnectSuite.scala
@@ -21,7 +21,6 @@ import java.io.{BufferedWriter, File, FileWriter}
 import java.nio.file.Paths
 import java.sql.Timestamp
 
-import org.scalatest.BeforeAndAfterEach
 import org.scalatest.concurrent.Eventually.eventually
 import org.scalatest.concurrent.Futures.timeout
 import org.scalatest.time.SpanSugar._
@@ -285,11 +284,7 @@ class TTLTestStatefulProcessor
   }
 }
 
-class TransformWithStateConnectSuite
-    extends QueryTest
-    with RemoteSparkSession
-    with Logging
-    with BeforeAndAfterEach {
+class TransformWithStateConnectSuite extends QueryTest with RemoteSparkSession with Logging {
   val testData: Seq[(String, String)] = Seq(("a", "1"), ("b", "1"), ("a", "2"))
   val twsAdditionalSQLConf = Seq(
     "spark.sql.streaming.stateStore.providerClass" ->
diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/test/ConnectFunSuite.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/test/ConnectFunSuite.scala
index 89f70d6f1214..a6eef45164a5 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/test/ConnectFunSuite.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/test/ConnectFunSuite.scala
@@ -18,6 +18,7 @@ package org.apache.spark.sql.connect.test
 
 import java.nio.file.Path
 
+import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
 import org.scalatest.funsuite.AnyFunSuite // scalastyle:ignore funsuite
 
 import org.apache.spark.connect.proto
@@ -27,7 +28,10 @@ import org.apache.spark.sql.connect.ColumnNodeToProtoConverter
 /**
  * The basic testsuite the client tests should extend from.
  */
-trait ConnectFunSuite extends AnyFunSuite { // scalastyle:ignore funsuite
+trait ConnectFunSuite
+    extends AnyFunSuite // scalastyle:ignore funsuite
+    with BeforeAndAfterAll
+    with BeforeAndAfterEach {
 
   // Borrowed from SparkFunSuite
   protected def getWorkspaceFilePath(first: String, more: String*): Path = {
diff --git a/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/service/SparkConnectCloneSessionSuite.scala b/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/service/SparkConnectCloneSessionSuite.scala
index 42541b8c5f00..09292ec2a227 100644
--- a/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/service/SparkConnectCloneSessionSuite.scala
+++ b/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/service/SparkConnectCloneSessionSuite.scala
@@ -19,12 +19,10 @@ package org.apache.spark.sql.connect.service
 
 import java.util.UUID
 
-import org.scalatest.BeforeAndAfterEach
-
 import org.apache.spark.SparkSQLException
 import org.apache.spark.sql.test.SharedSparkSession
 
-class SparkConnectCloneSessionSuite extends SharedSparkSession with BeforeAndAfterEach {
+class SparkConnectCloneSessionSuite extends SharedSparkSession {
 
   override def beforeEach(): Unit = {
     super.beforeEach()
diff --git a/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/service/SparkConnectSessionManagerSuite.scala b/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/service/SparkConnectSessionManagerSuite.scala
index 04d16a910746..1716fbb34b96 100644
--- a/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/service/SparkConnectSessionManagerSuite.scala
+++ b/sql/connect/server/src/test/scala/org/apache/spark/sql/connect/service/SparkConnectSessionManagerSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark.sql.connect.service
 
 import java.util.UUID
 
-import org.scalatest.BeforeAndAfterEach
 import org.scalatest.time.SpanSugar._
 
 import org.apache.spark.SparkSQLException
@@ -28,7 +27,7 @@ import org.apache.spark.sql.pipelines.graph.{DataflowGraph, PipelineUpdateContex
 import org.apache.spark.sql.pipelines.logging.PipelineEvent
 import org.apache.spark.sql.test.SharedSparkSession
 
-class SparkConnectSessionManagerSuite extends SharedSparkSession with BeforeAndAfterEach {
+class SparkConnectSessionManagerSuite extends SharedSparkSession {
 
   override def beforeEach(): Unit = {
     super.beforeEach()
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSessionWindowingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSessionWindowingSuite.scala
index 6d118a7fd98e..166ba2afcf22 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSessionWindowingSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSessionWindowingSuite.scala
@@ -19,8 +19,6 @@ package org.apache.spark.sql
 
 import java.time.LocalDateTime
 
-import org.scalatest.BeforeAndAfterEach
-
 import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
 import org.apache.spark.sql.catalyst.expressions.{AttributeReference, GreaterThan}
 import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, Expand, Filter, LogicalPlan, Project}
@@ -28,8 +26,7 @@ import org.apache.spark.sql.functions._
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types._
 
-class DataFrameSessionWindowingSuite extends QueryTest with SharedSparkSession
-  with BeforeAndAfterEach {
+class DataFrameSessionWindowingSuite extends QueryTest with SharedSparkSession {
 
   import testImplicits._
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/BaseScriptTransformationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/BaseScriptTransformationSuite.scala
index b2a46afb13b9..5ddbf873d466 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/BaseScriptTransformationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/BaseScriptTransformationSuite.scala
@@ -24,7 +24,6 @@ import org.json4s.DefaultFormats
 import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods._
 import org.scalatest.Assertions._
-import org.scalatest.BeforeAndAfterEach
 import org.scalatest.exceptions.TestFailedException
 
 import org.apache.spark.{SparkException, TaskContext, TestUtils}
@@ -40,8 +39,7 @@ import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.CalendarInterval
 import org.apache.spark.util.ArrayImplicits._
 
-abstract class BaseScriptTransformationSuite extends SparkPlanTest with SQLTestUtils
-  with BeforeAndAfterEach {
+abstract class BaseScriptTransformationSuite extends SparkPlanTest with SQLTestUtils {
   import testImplicits._
   import ScriptTransformationIOSchema._
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSparkSubmitSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSparkSubmitSuite.scala
index 69145d890fc1..54f3dc8e15bc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSparkSubmitSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSparkSubmitSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.execution
 
-import org.scalatest.{Assertions, BeforeAndAfterEach}
+import org.scalatest.Assertions
 import org.scalatest.matchers.must.Matchers
 import org.scalatest.time.SpanSugar._
 
@@ -36,7 +36,6 @@ import org.apache.spark.util.ResetSystemProperties
 @ExtendedSQLTest
 class WholeStageCodegenSparkSubmitSuite extends SparkSubmitTestUtils
   with Matchers
-  with BeforeAndAfterEach
   with ResetSystemProperties {
 
   test("Generated code on driver should not embed platform-specific constant") 
{
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuiteBase.scala
index 4c3c2b4de344..938541e2f8c0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/ConnectionProviderSuiteBase.scala
@@ -22,12 +22,10 @@ import javax.security.auth.login.Configuration
 
 import scala.jdk.CollectionConverters._
 
-import org.scalatest.BeforeAndAfterEach
-
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.execution.datasources.jdbc.{DriverRegistry, JDBCOptions}
 
-abstract class ConnectionProviderSuiteBase extends SparkFunSuite with BeforeAndAfterEach {
+abstract class ConnectionProviderSuiteBase extends SparkFunSuite {
   protected def registerDriver(driverClass: String): Driver = {
     DriverRegistry.register(driverClass)
     DriverManager.getDrivers.asScala.collectFirst {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcSourceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcSourceSuite.scala
index 040999476ece..f36a81945860 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcSourceSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcSourceSuite.scala
@@ -31,7 +31,6 @@ import org.apache.orc.OrcFile
 import org.apache.orc.OrcProto.ColumnEncoding.Kind.{DICTIONARY_V2, DIRECT, DIRECT_V2}
 import org.apache.orc.OrcProto.Stream.Kind
 import org.apache.orc.impl.RecordReaderImpl
-import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.{SPARK_VERSION_SHORT, SparkConf, SparkException}
 import org.apache.spark.sql.{Row, SPARK_VERSION_METADATA_KEY}
@@ -45,7 +44,7 @@ import org.apache.spark.util.Utils
 case class OrcData(intField: Int, stringField: String)
 
 abstract class OrcSuite
-  extends OrcTest with BeforeAndAfterAll with CommonFileDataSourceSuite with SQLTestUtilsBase {
+  extends OrcTest with CommonFileDataSourceSuite with SQLTestUtilsBase {
   import testImplicits._
 
   override protected def dataSourceFormat = "orc"
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcTest.scala
index 7f95c1fe85d7..39368c2686fc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcTest.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcTest.scala
@@ -22,8 +22,6 @@ import java.io.File
 import scala.reflect.ClassTag
 import scala.reflect.runtime.universe.TypeTag
 
-import org.scalatest.BeforeAndAfterAll
-
 import org.apache.spark.sql.{Column, DataFrame, QueryTest}
 import org.apache.spark.sql.catalyst.expressions.{Attribute, Predicate}
 import org.apache.spark.sql.catalyst.planning.PhysicalOperation
@@ -49,7 +47,7 @@ import org.apache.spark.util.Utils
  *       -> HiveOrcPartitionDiscoverySuite
  *   -> OrcFilterSuite
  */
-trait OrcTest extends QueryTest with FileBasedDataSourceTest with BeforeAndAfterAll {
+trait OrcTest extends QueryTest with FileBasedDataSourceTest {
 
   val orcImp: String = "native"
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlPartitioningSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlPartitioningSuite.scala
index 2a5a36c74a28..a9ce0d42a685 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlPartitioningSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/XmlPartitioningSuite.scala
@@ -16,7 +16,6 @@
  */
 package org.apache.spark.sql.execution.datasources.xml
 
-import org.scalatest.BeforeAndAfterAll
 import org.scalatest.matchers.should.Matchers
 
 import org.apache.spark.SparkFunSuite
@@ -26,7 +25,7 @@ import org.apache.spark.sql.internal.SQLConf
 /**
  * Tests various cases of partition size, compression.
  */
-class XmlPartitioningSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll {
+class XmlPartitioningSuite extends SparkFunSuite with Matchers {
   protected val legacyParserEnabled: Boolean = false
 
   protected def doPartitionTest(suffix: String, blockSize: Long, large: Boolean): Unit = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/parsers/StaxXmlParserUtilsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/parsers/StaxXmlParserUtilsSuite.scala
index ad5b176f71f7..2e26c2bb3447 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/parsers/StaxXmlParserUtilsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/xml/parsers/StaxXmlParserUtilsSuite.scala
@@ -21,12 +21,10 @@ import javax.xml.stream.{XMLInputFactory, XMLStreamConstants}
 
 import scala.jdk.CollectionConverters._
 
-import org.scalatest.BeforeAndAfterAll
-
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.xml.{StaxXmlParserUtils, XmlOptions}
 
-final class StaxXmlParserUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
+final class StaxXmlParserUtilsSuite extends SparkFunSuite {
 
   private val factory = StaxXmlParserUtils.factory
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/python/streaming/BaseStreamingArrowWriterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/python/streaming/BaseStreamingArrowWriterSuite.scala
index 49839fb8c985..aa6aca507624 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/python/streaming/BaseStreamingArrowWriterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/python/streaming/BaseStreamingArrowWriterSuite.scala
@@ -20,13 +20,12 @@ import org.apache.arrow.vector.VectorSchemaRoot
 import org.apache.arrow.vector.ipc.ArrowStreamWriter
 import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{mock, never, times, verify, when}
-import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.execution.arrow.ArrowWriter
 
-class BaseStreamingArrowWriterSuite extends SparkFunSuite with BeforeAndAfterEach {
+class BaseStreamingArrowWriterSuite extends SparkFunSuite {
   // Setting the maximum number of records per batch to 2 to make test easier.
   val arrowMaxRecordsPerBatch = 2
   val arrowMaxBytesPerBatch = Int.MaxValue
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamTest.scala
index e57c4e1e665c..f36a4bc953b2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamTest.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamTest.scala
@@ -23,7 +23,7 @@ import scala.reflect.ClassTag
 import scala.util.Random
 import scala.util.control.NonFatal
 
-import org.scalatest.{Assertions, BeforeAndAfterAll}
+import org.scalatest.Assertions
 import org.scalatest.concurrent.{Eventually, Signaler, ThreadSignaler, TimeLimits}
 import org.scalatest.concurrent.PatienceConfiguration.Timeout
 import org.scalatest.exceptions.TestFailedDueToTimeoutException
@@ -73,7 +73,7 @@ import org.apache.spark.util.{Clock, SystemClock, Utils}
  * avoid hanging forever in the case of failures. However, individual suites can change this
  * by overriding `streamingTimeout`.
  */
-trait StreamTest extends QueryTest with SharedSparkSession with TimeLimits with BeforeAndAfterAll {
+trait StreamTest extends QueryTest with SharedSparkSession with TimeLimits {
 
   // Necessary to make ScalaTest 3.x interrupt a thread on the JVM like ScalaTest 2.2.x
   implicit val defaultSignaler: Signaler = ThreadSignaler
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/EpochCoordinatorSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/EpochCoordinatorSuite.scala
index 02c0dbb5ab98..bc4c64883868 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/EpochCoordinatorSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/EpochCoordinatorSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark.sql.streaming.continuous
 import org.mockito.{ArgumentCaptor, InOrder}
 import org.mockito.ArgumentMatchers.{any, eq => eqTo}
 import org.mockito.Mockito._
-import org.scalatest.BeforeAndAfterEach
 import org.scalatestplus.mockito.MockitoSugar
 
 import org.apache.spark._
@@ -36,8 +35,7 @@ import org.apache.spark.sql.test.TestSparkSession
 class EpochCoordinatorSuite
   extends SparkFunSuite
     with LocalSparkSession
-    with MockitoSugar
-    with BeforeAndAfterEach {
+    with MockitoSugar {
 
   private var epochCoordinator: RpcEndpointRef = _
 
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
index 9db2beaf1e7a..1d8a3261d34c 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
@@ -40,7 +40,6 @@ import org.apache.hive.service.rpc.thrift.TCLIService.Client
 import org.apache.hive.service.rpc.thrift.TRowSet
 import org.apache.thrift.protocol.TBinaryProtocol
 import org.apache.thrift.transport.TSocket
-import org.scalatest.BeforeAndAfterAll
 import org.scalatest.concurrent.Eventually._
 
 import org.apache.spark.{SparkException, SparkFunSuite}
@@ -1181,7 +1180,7 @@ object ServerMode extends Enumeration {
   val binary, http = Value
 }
 
-abstract class HiveThriftServer2TestBase extends SparkFunSuite with BeforeAndAfterAll with Logging {
+abstract class HiveThriftServer2TestBase extends SparkFunSuite with Logging {
   def mode: ServerMode.Value
 
   private val CLASS_NAME = HiveThriftServer2.getClass.getCanonicalName.stripSuffix("$")
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/UISeleniumSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/UISeleniumSuite.scala
index 592b5aac3d0f..cf2c5660bdd7 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/UISeleniumSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/UISeleniumSuite.scala
@@ -25,7 +25,6 @@ import scala.util.Random
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars
 import org.openqa.selenium.WebDriver
 import org.openqa.selenium.htmlunit.HtmlUnitDriver
-import org.scalatest.BeforeAndAfterAll
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.matchers.must.Matchers
 import org.scalatest.matchers.should.Matchers._
@@ -38,7 +37,7 @@ import org.apache.spark.ui.SparkUICssErrorHandler
 @WebBrowserTest
 class UISeleniumSuite
   extends HiveThriftServer2TestBase
-  with WebBrowser with Matchers with BeforeAndAfterAll {
+  with WebBrowser with Matchers {
 
   implicit var webDriver: WebDriver = _
   var server: HiveThriftServer2 = _
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala
index 52138ae45587..c75223bfef12 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CompressionCodecSuite.scala
@@ -25,7 +25,6 @@ import scala.jdk.CollectionConverters._
 import org.apache.hadoop.fs.Path
 import org.apache.orc.OrcConf.COMPRESS
 import org.apache.parquet.hadoop.ParquetOutputFormat
-import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.sql.execution.datasources.orc.{OrcCompressionCodec, OrcOptions}
 import org.apache.spark.sql.execution.datasources.parquet.{ParquetCompressionCodec, ParquetOptions, ParquetTest}
@@ -33,7 +32,7 @@ import org.apache.spark.sql.hive.orc.OrcFileOperator
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.internal.SQLConf
 
-class CompressionCodecSuite extends TestHiveSingleton with ParquetTest with BeforeAndAfterAll {
+class CompressionCodecSuite extends TestHiveSingleton with ParquetTest {
   import spark.implicits._
 
   override def beforeAll(): Unit = {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala
index a4caf78bed05..cea29e9a6fbc 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala
@@ -19,13 +19,11 @@ package org.apache.spark.sql.hive
 
 import scala.util.Try
 
-import org.scalatest.BeforeAndAfterEach
-
 import org.apache.spark.sql.{AnalysisException, QueryTest}
 import org.apache.spark.sql.catalyst.util.quietly
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 
-class ErrorPositionSuite extends QueryTest with TestHiveSingleton with BeforeAndAfterEach {
+class ErrorPositionSuite extends QueryTest with TestHiveSingleton {
   import spark.implicits._
 
   override protected def beforeEach(): Unit = {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSchemaInferenceSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSchemaInferenceSuite.scala
index 7bbd4fac3d0d..5e521565383c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSchemaInferenceSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSchemaInferenceSuite.scala
@@ -22,8 +22,6 @@ import java.util.Locale
 
 import scala.util.Random
 
-import org.scalatest.BeforeAndAfterEach
-
 import org.apache.spark.sql.QueryTest
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog._
@@ -35,7 +33,7 @@ import org.apache.spark.sql.test.SQLTestUtils
 import org.apache.spark.sql.types._
 
 class HiveSchemaInferenceSuite
-  extends QueryTest with TestHiveSingleton with SQLTestUtils with BeforeAndAfterEach {
+  extends QueryTest with TestHiveSingleton with SQLTestUtils {
 
   import HiveSchemaInferenceSuite._
   import HiveExternalCatalog.DATASOURCE_SCHEMA_PREFIX
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index f44f8b80fea5..504f9a300621 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -24,7 +24,6 @@ import scala.util.Properties
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.hive.common.FileUtils
 import org.scalatest.Assertions._
-import org.scalatest.BeforeAndAfterEach
 import org.scalatest.matchers.must.Matchers
 import org.scalatest.time.Span
 import org.scalatest.time.SpanSugar._
@@ -54,7 +53,6 @@ import org.apache.spark.util.{ResetSystemProperties, Utils}
 class HiveSparkSubmitSuite
   extends SparkSubmitTestUtils
   with Matchers
-  with BeforeAndAfterEach
   with ResetSystemProperties {
 
   override protected val defaultSparkSubmitTimeout: Span = 5.minutes
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala
index 9de5c6aab9cc..7ef5bea7312b 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ListTablesSuite.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql.hive
 
-import org.scalatest.BeforeAndAfterAll
-
 import org.apache.spark.sql.QueryTest
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.TableIdentifier
@@ -27,8 +25,7 @@ import org.apache.spark.sql.hive.test.TestHiveSingleton
 
 class ListTablesSuite extends QueryTest
   with AnalysisTest
-  with TestHiveSingleton
-  with BeforeAndAfterAll {
+  with TestHiveSingleton {
   import hiveContext._
   import hiveContext.sparkSession.implicits._
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/PartitionedTablePerfStatsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/PartitionedTablePerfStatsSuite.scala
index b67370f6eb9f..39a44a69674d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/PartitionedTablePerfStatsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/PartitionedTablePerfStatsSuite.scala
@@ -20,8 +20,6 @@ package org.apache.spark.sql.hive
 import java.io.File
 import java.util.concurrent.{Executors, TimeUnit}
 
-import org.scalatest.BeforeAndAfterEach
-
 import org.apache.spark.metrics.source.HiveCatalogMetrics
 import org.apache.spark.sql.QueryTest
 import org.apache.spark.sql.execution.datasources.FileStatusCache
@@ -30,7 +28,7 @@ import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SQLTestUtils
 
 class PartitionedTablePerfStatsSuite
-  extends QueryTest with TestHiveSingleton with SQLTestUtils with BeforeAndAfterEach {
+  extends QueryTest with TestHiveSingleton with SQLTestUtils {
 
   override def beforeEach(): Unit = {
     super.beforeEach()
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala
index 5dce214a896b..b3f1dc0792fc 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql.hive
 
-import org.scalatest.BeforeAndAfterEach
-
 import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, Row}
 import org.apache.spark.sql.catalyst.FunctionIdentifier
 import org.apache.spark.sql.catalyst.parser.ParseException
@@ -36,8 +34,7 @@ case class FunctionResult(f1: String, f2: String)
 class UDFSuite
   extends QueryTest
   with SQLTestUtils
-  with TestHiveSingleton
-  with BeforeAndAfterEach {
+  with TestHiveSingleton {
 
   import spark.implicits._
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HivePartitionFilteringSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HivePartitionFilteringSuite.scala
index 693cb0ea1b1d..fae01d6cbc45 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HivePartitionFilteringSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HivePartitionFilteringSuite.scala
@@ -24,7 +24,6 @@ import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 import org.apache.hadoop.hive.serde2.`lazy`.LazySimpleSerDe
 import org.apache.hadoop.mapred.TextInputFormat
-import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.SparkRuntimeException
 import org.apache.spark.sql.catalyst.TableIdentifier
@@ -38,7 +37,7 @@ import org.apache.spark.sql.types.{BooleanType, DateType, IntegerType, LongType,
 import org.apache.spark.util.Utils
 
 class HivePartitionFilteringSuite(version: String)
-    extends HiveVersionSuite(version) with BeforeAndAfterAll with SQLHelper {
+    extends HiveVersionSuite(version) with SQLHelper {
 
   override def beforeEach(): Unit = {
     super.beforeEach()
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala
index f7dcd28c5037..cc4eecd10a43 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveComparisonTest.scala
@@ -25,8 +25,6 @@ import java.util.Locale
 
 import scala.util.control.NonFatal
 
-import org.scalatest.BeforeAndAfterAll
-
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.planning.PhysicalOperation
 import org.apache.spark.sql.catalyst.plans.logical._
@@ -47,7 +45,7 @@ import org.apache.spark.sql.hive.test.{TestHive, TestHiveQueryExecution}
  * See the documentation of public vals in this class for information on how test execution can be
  * configured using system properties.
  */
-abstract class HiveComparisonTest extends SparkFunSuite with BeforeAndAfterAll {
+abstract class HiveComparisonTest extends SparkFunSuite {
 
   override protected val enableAutoThreadAudit = false
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 3dee66303a50..5c23192b1d3f 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -29,7 +29,6 @@ import org.apache.parquet.format.converter.ParquetMetadataConverter.NO_FILTER
 import org.apache.parquet.hadoop.util.HadoopInputFile
 import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{spy, times, verify}
-import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.{SparkException, SparkUnsupportedOperationException}
 import org.apache.spark.sql.{AnalysisException, Row, SaveMode}
@@ -59,7 +58,7 @@ import org.apache.spark.util.Utils
 
 @SlowHiveTest
 class HiveDDLSuite
-  extends DDLSuite with SQLTestUtils with TestHiveSingleton with BeforeAndAfterEach {
+  extends DDLSuite with SQLTestUtils with TestHiveSingleton {
   import testImplicits._
   val hiveFormats = Seq("PARQUET", "ORC", "TEXTFILE", "SEQUENCEFILE", 
"RCFILE", "AVRO")
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala
index aac601043f33..7e7499449bb6 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveSerDeSuite.scala
@@ -19,8 +19,6 @@ package org.apache.spark.sql.hive.execution
 
 import java.net.URI
 
-import org.scalatest.BeforeAndAfterAll
-
 import org.apache.spark.sql.{AnalysisException, SparkSession}
 import org.apache.spark.sql.catalyst.catalog.CatalogTable
 import org.apache.spark.sql.catalyst.plans.PlanTest
@@ -35,7 +33,7 @@ import org.apache.spark.tags.SlowHiveTest
  * A set of tests that validates support for Hive SerDe.
  */
 @SlowHiveTest
-class HiveSerDeSuite extends HiveComparisonTest with PlanTest with BeforeAndAfterAll {
+class HiveSerDeSuite extends HiveComparisonTest with PlanTest {
   override def beforeAll(): Unit = {
     import TestHive._
     import org.apache.hadoop.hive.serde2.RegexSerDe
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHiveSingleton.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHiveSingleton.scala
index c8d72b78c64c..d1bc6bd92fff 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHiveSingleton.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHiveSingleton.scala
@@ -17,15 +17,13 @@
 
 package org.apache.spark.sql.hive.test
 
-import org.scalatest.BeforeAndAfterAll
-
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.classic.SparkSession
 import org.apache.spark.sql.hive.HiveExternalCatalog
 import org.apache.spark.sql.hive.client.HiveClient
 
 
-trait TestHiveSingleton extends SparkFunSuite with BeforeAndAfterAll {
+trait TestHiveSingleton extends SparkFunSuite {
   override protected val enableAutoThreadAudit = false
   protected val spark: SparkSession = TestHive.sparkSession
   protected val hiveContext: TestHiveContext = TestHive
diff --git a/sql/pipelines/src/test/scala/org/apache/spark/sql/pipelines/logging/ConstructPipelineEventSuite.scala b/sql/pipelines/src/test/scala/org/apache/spark/sql/pipelines/logging/ConstructPipelineEventSuite.scala
index f91dd673ca25..fe37ccf3c06c 100644
--- a/sql/pipelines/src/test/scala/org/apache/spark/sql/pipelines/logging/ConstructPipelineEventSuite.scala
+++ b/sql/pipelines/src/test/scala/org/apache/spark/sql/pipelines/logging/ConstructPipelineEventSuite.scala
@@ -19,13 +19,11 @@ package org.apache.spark.sql.pipelines.logging
 
 import java.sql.Timestamp
 
-import org.scalatest.BeforeAndAfterEach
-
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.pipelines.common.FlowStatus
 import org.apache.spark.sql.pipelines.graph.QueryOrigin
 
-class ConstructPipelineEventSuite extends SparkFunSuite {
+class ConstructPipelineEventSuite extends SparkFunSuite {
   test("Basic event construction") {
     val ts = new Timestamp(1747338049615L)
     val event = ConstructPipelineEvent(


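For reference, a minimal sketch of why the removed mixins are redundant (the names BaseSparkSuite and ExampleSuite are hypothetical stand-ins for SparkFunSuite and its subclasses; this sketch is not part of the patch): because the base suite already mixes in both ScalaTest lifecycle traits, every subclass inherits beforeAll()/beforeEach() and re-declaring the traits in the subclass is a no-op.

    import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
    import org.scalatest.funsuite.AnyFunSuite

    // Stand-in for SparkFunSuite: mixes in both lifecycle traits once, at the root.
    abstract class BaseSparkSuite extends AnyFunSuite
      with BeforeAndAfterAll with BeforeAndAfterEach

    // Subclasses only override the hooks; no extra `with BeforeAndAfter...` needed.
    class ExampleSuite extends BaseSparkSuite {
      override def beforeAll(): Unit = {
        super.beforeAll()  // keep the lifecycle chain intact
        // one-time setup goes here
      }
      override protected def beforeEach(): Unit = {
        super.beforeEach()
        // per-test setup goes here
      }
      test("sanity") {
        assert(1 + 1 == 2)
      }
    }

The super.beforeAll()/super.beforeEach() calls mirror the pattern visible in the hunks above: the hooks still run, only the redundant trait declarations are dropped.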
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
