This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 5bcbc54acc11 [SPARK-55022][SQL][TESTS] Remove unnecessary mixins with `SharedSparkSession`
5bcbc54acc11 is described below

commit 5bcbc54acc11e24b2f39e66626b70401d5e193ec
Author: Ruifeng Zheng <[email protected]>
AuthorDate: Tue Jan 13 22:27:45 2026 +0800

    [SPARK-55022][SQL][TESTS] Remove unnecessary mixins with `SharedSparkSession`
    
    ### What changes were proposed in this pull request?
    Remove unnecessary mixins with `SharedSparkSession`.

    `SharedSparkSession` already extends `SQLTestUtils` and `PlanTest`, so mixing those traits in explicitly is redundant.
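
    As a minimal sketch (hypothetical stand-in traits, not the actual Spark test classes) of why the extra mixin is a no-op: Scala's trait linearization already includes every ancestor of a mixed-in trait, so restating a parent trait changes nothing.

    ```scala
    // Hypothetical stand-ins for SQLTestUtils / SharedSparkSession.
    trait SQLTestUtilsLike {
      def withTable(name: String)(f: => Unit): Unit = f
    }
    trait SharedSparkSessionLike extends SQLTestUtilsLike

    // Before: SQLTestUtilsLike is already an ancestor of SharedSparkSessionLike,
    // so mixing it in again adds nothing to the class linearization.
    class SuiteBefore extends SQLTestUtilsLike with SharedSparkSessionLike
    // After: identical linearization, less noise.
    class SuiteAfter extends SharedSparkSessionLike
    ```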
    
    ### Why are the changes needed?
    Code cleanup, to make further refactoring easier
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    CI
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #53786 from zhengruifeng/del_share_spark_session.
    
    Authored-by: Ruifeng Zheng <[email protected]>
    Signed-off-by: Wenchen Fan <[email protected]>
---
 .../test/scala/org/apache/spark/sql/avro/AvroSchemaHelperSuite.scala | 4 ++--
 sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala  | 4 ++--
 .../test/scala/org/apache/spark/sql/InjectRuntimeFilterSuite.scala   | 4 ++--
 sql/core/src/test/scala/org/apache/spark/sql/JoinHintSuite.scala     | 3 +--
 .../test/scala/org/apache/spark/sql/RuntimeNullChecksV2Writes.scala  | 4 ++--
 .../apache/spark/sql/analysis/resolver/MetadataResolverSuite.scala   | 5 ++---
 .../scala/org/apache/spark/sql/execution/ColumnarRulesSuite.scala    | 3 +--
 .../spark/sql/execution/bucketing/CoalesceBucketsInJoinSuite.scala   | 4 ++--
 .../spark/sql/execution/datasources/DataSourceStrategySuite.scala    | 3 +--
 .../spark/sql/execution/datasources/FileSourceCodecSuite.scala       | 4 ++--
 .../sql/execution/datasources/v2/DataSourceV2StrategySuite.scala     | 3 +--
 .../spark/sql/execution/exchange/ValidateRequirementsSuite.scala     | 4 ++--
 12 files changed, 20 insertions(+), 25 deletions(-)

diff --git a/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroSchemaHelperSuite.scala b/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroSchemaHelperSuite.scala
index 8ad06492fa5d..936458561978 100644
--- a/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroSchemaHelperSuite.scala
+++ b/connector/avro/src/test/scala/org/apache/spark/sql/avro/AvroSchemaHelperSuite.scala
@@ -20,10 +20,10 @@ import org.apache.avro.SchemaBuilder
 
 import org.apache.spark.sql.avro.AvroUtils.AvroMatchedField
 import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
 
-class AvroSchemaHelperSuite extends SQLTestUtils with SharedSparkSession {
+class AvroSchemaHelperSuite extends SharedSparkSession {
 
   test("ensure schema is a record") {
     val avroSchema = SchemaBuilder.builder().intType()
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index 0d807aeae4d7..234383239e90 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -53,7 +53,7 @@ import org.apache.spark.sql.execution.exchange.ShuffleExchangeExec
 import org.apache.spark.sql.execution.ui.SparkListenerSQLAdaptiveExecutionUpdate
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types.{StringType, StructField, StructType}
 import org.apache.spark.storage.{RDDBlockId, StorageLevel}
 import org.apache.spark.storage.StorageLevel.{MEMORY_AND_DISK_2, MEMORY_ONLY}
@@ -64,7 +64,7 @@ import org.apache.spark.util.{AccumulatorContext, Utils}
 private case class BigData(s: String)
 
 @SlowSQLTest
-class CachedTableSuite extends QueryTest with SQLTestUtils
+class CachedTableSuite extends QueryTest
   with SharedSparkSession
   with AdaptiveSparkPlanHelper {
   import testImplicits._
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/InjectRuntimeFilterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/InjectRuntimeFilterSuite.scala
index 603ec183bfb6..290d6e87651c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/InjectRuntimeFilterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/InjectRuntimeFilterSuite.scala
@@ -24,10 +24,10 @@ import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, Filter, LogicalPl
 import org.apache.spark.sql.execution.{ReusedSubqueryExec, SubqueryExec}
 import org.apache.spark.sql.execution.adaptive.{AdaptiveSparkPlanHelper, AQEPropagateEmptyRelation}
 import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types.{IntegerType, StructType}
 
-class InjectRuntimeFilterSuite extends QueryTest with SQLTestUtils with SharedSparkSession
+class InjectRuntimeFilterSuite extends QueryTest with SharedSparkSession
   with AdaptiveSparkPlanHelper {
 
   protected override def beforeAll(): Unit = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JoinHintSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JoinHintSuite.scala
index 53e47f428c3a..df9f1d1ed2da 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JoinHintSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JoinHintSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark.sql
 import org.apache.logging.log4j.Level
 
 import org.apache.spark.sql.catalyst.optimizer.{BuildLeft, BuildRight, BuildSide, EliminateResolvedHint}
-import org.apache.spark.sql.catalyst.plans.PlanTest
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.rules.RuleExecutor
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
@@ -28,7 +27,7 @@ import org.apache.spark.sql.execution.joins._
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSparkSession
 
-class JoinHintSuite extends PlanTest with SharedSparkSession with AdaptiveSparkPlanHelper {
+class JoinHintSuite extends SharedSparkSession with AdaptiveSparkPlanHelper {
   import testImplicits._
 
   lazy val df = spark.range(10)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/RuntimeNullChecksV2Writes.scala b/sql/core/src/test/scala/org/apache/spark/sql/RuntimeNullChecksV2Writes.scala
index ee4dfa929cf8..e57957daba07 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/RuntimeNullChecksV2Writes.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/RuntimeNullChecksV2Writes.scala
@@ -20,10 +20,10 @@ package org.apache.spark.sql
 import org.apache.spark.{SparkConf, SparkRuntimeException}
 import org.apache.spark.sql.connector.catalog.{Column => ColumnV2, Identifier, InMemoryTableCatalog, TableInfo}
 import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types.{ArrayType, IntegerType, MapType, StructType}
 
-class RuntimeNullChecksV2Writes extends QueryTest with SQLTestUtils with SharedSparkSession {
+class RuntimeNullChecksV2Writes extends QueryTest with SharedSparkSession {
 
   import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/analysis/resolver/MetadataResolverSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/analysis/resolver/MetadataResolverSuite.scala
index f315f1b8a597..fdb8aebffbdd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/analysis/resolver/MetadataResolverSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/analysis/resolver/MetadataResolverSuite.scala
@@ -26,14 +26,13 @@ import org.apache.spark.sql.catalyst.analysis.resolver._
 import org.apache.spark.sql.catalyst.catalog.UnresolvedCatalogRelation
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias, View}
 import org.apache.spark.sql.execution.datasources.{FileResolver, HadoopFsRelation, LogicalRelation}
-import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.util.CaseInsensitiveStringMap
 
 class MetadataResolverSuite
     extends QueryTest
-    with SharedSparkSession
-    with SQLTestUtils {
+    with SharedSparkSession {
   private val catalogName = "spark_catalog"
 
   private val keyValueTableSchema = StructType(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ColumnarRulesSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ColumnarRulesSuite.scala
index 5d4d2d48f829..4658df4061d7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ColumnarRulesSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ColumnarRulesSuite.scala
@@ -21,11 +21,10 @@ import org.apache.spark.SparkUnsupportedOperationException
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.catalyst.plans.PlanTest
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.vectorized.ColumnarBatch
 
-class ColumnarRulesSuite extends PlanTest with SharedSparkSession {
+class ColumnarRulesSuite extends SharedSparkSession {
 
   test("Idempotency of columnar rules - RowToColumnar/ColumnarToRow") {
     val rules = ApplyColumnarRulesAndInsertTransitions(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/bucketing/CoalesceBucketsInJoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/bucketing/CoalesceBucketsInJoinSuite.scala
index b44e899c1a4f..f8d395bead67 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/bucketing/CoalesceBucketsInJoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/bucketing/CoalesceBucketsInJoinSuite.scala
@@ -27,10 +27,10 @@ import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, InMemoryFil
 import org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat
 import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, ShuffledHashJoinExec, SortMergeJoinExec}
 import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types.IntegerType
 
-class CoalesceBucketsInJoinSuite extends SQLTestUtils with SharedSparkSession {
+class CoalesceBucketsInJoinSuite extends SharedSparkSession {
   private val SORT_MERGE_JOIN = "sortMergeJoin"
   private val SHUFFLED_HASH_JOIN = "shuffledHashJoin"
   private val BROADCAST_HASH_JOIN = "broadcastHashJoin"
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategySuite.scala
index 9f0396ab60e3..56d07dce755f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategySuite.scala
@@ -19,12 +19,11 @@ package org.apache.spark.sql.execution.datasources
 
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.expressions._
-import org.apache.spark.sql.catalyst.plans.PlanTest
 import org.apache.spark.sql.sources
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
 
-class DataSourceStrategySuite extends PlanTest with SharedSparkSession {
+class DataSourceStrategySuite extends SharedSparkSession {
   val attrInts = Seq(
     $"cint".int,
     $"`c.int`".int,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceCodecSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceCodecSuite.scala
index 8aa2f079886a..d58a5d9f1cbd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceCodecSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceCodecSuite.scala
@@ -23,9 +23,9 @@ import org.apache.spark.sql.QueryTest
 import org.apache.spark.sql.execution.datasources.orc.OrcCompressionCodec
 import org.apache.spark.sql.execution.datasources.parquet.ParquetCompressionCodec
 import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
+import org.apache.spark.sql.test.SharedSparkSession
 
-trait FileSourceCodecSuite extends QueryTest with SQLTestUtils with SharedSparkSession {
+trait FileSourceCodecSuite extends QueryTest with SharedSparkSession {
 
   protected def format: String
   protected val codecConfigName: String
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2StrategySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2StrategySuite.scala
index ab6668eeb45a..5f89a618edd5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2StrategySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2StrategySuite.scala
@@ -21,7 +21,6 @@ import org.apache.spark.SparkConf
 import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.expressions._
-import org.apache.spark.sql.catalyst.plans.PlanTest
 import org.apache.spark.sql.catalyst.util.V2ExpressionBuilder
 import org.apache.spark.sql.connector.expressions.{Expression => V2Expression, FieldReference, GeneralScalarExpression, LiteralValue}
 import org.apache.spark.sql.connector.expressions.filter.{AlwaysFalse, AlwaysTrue, And => V2And, Not => V2Not, Or => V2Or, Predicate}
@@ -30,7 +29,7 @@ import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types.{BooleanType, DoubleType, IntegerType, LongType, StringType, StructField, StructType}
 import org.apache.spark.unsafe.types.UTF8String
 
-class DataSourceV2StrategySuite extends PlanTest with SharedSparkSession {
+class DataSourceV2StrategySuite extends SharedSparkSession {
 
   override protected def sparkConf: SparkConf = super.sparkConf
     .set(SQLConf.ANSI_ENABLED, true)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/exchange/ValidateRequirementsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/exchange/ValidateRequirementsSuite.scala
index d46d4fd32017..4e02a10eb41e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/exchange/ValidateRequirementsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/exchange/ValidateRequirementsSuite.scala
@@ -18,13 +18,13 @@
 package org.apache.spark.sql.execution.exchange
 
 import org.apache.spark.sql.catalyst.expressions.{Ascending, SortOrder}
-import org.apache.spark.sql.catalyst.plans.{Inner, PlanTest}
+import org.apache.spark.sql.catalyst.plans.Inner
 import org.apache.spark.sql.catalyst.plans.physical.{HashPartitioning, SinglePartition}
 import org.apache.spark.sql.execution.SortExec
 import org.apache.spark.sql.execution.joins.SortMergeJoinExec
 import org.apache.spark.sql.test.SharedSparkSession
 
-class ValidateRequirementsSuite extends PlanTest with SharedSparkSession {
+class ValidateRequirementsSuite extends SharedSparkSession {
 
   import testImplicits._
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
