This is an automated email from the ASF dual-hosted git repository.
beliefer pushed a commit to branch branch-4.0
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-4.0 by this push:
new b2e0e763e335 [SPARK-51292][SQL] Remove unnecessary inheritance from
PlanTestBase, ExpressionEvalHelper and PlanTest
b2e0e763e335 is described below
commit b2e0e763e335fd6b0f56cfb7ec9f0d2ad7ec948a
Author: beliefer <[email protected]>
AuthorDate: Sun Feb 23 17:44:29 2025 +0800
[SPARK-51292][SQL] Remove unnecessary inheritance from PlanTestBase,
ExpressionEvalHelper and PlanTest
### What changes were proposed in this pull request?
This PR proposes to remove unnecessary inheritance from `PlanTestBase`,
`ExpressionEvalHelper` and `PlanTest`.
### Why are the changes needed?
1. Some classes extend both `ExpressionEvalHelper` and `PlanTestBase`, but
`ExpressionEvalHelper` already extends `PlanTestBase`.
```
trait ExpressionEvalHelper extends ScalaCheckDrivenPropertyChecks with
PlanTestBase {
self: SparkFunSuite =>
...
}
```
2. The classes `NullDownPropagationSuite`, `OptimizeCsvExprsSuite` and
`PushFoldableIntoBranchesSuite` don't need `ExpressionEvalHelper` at all.
3. Some classes extend both `QueryTest` and `PlanTest`, but `QueryTest`
already extends `PlanTest`.
```
abstract class QueryTest extends PlanTest {
...
}
```
### Does this PR introduce _any_ user-facing change?
'No'.
Just update the inner code.
### How was this patch tested?
GA.
### Was this patch authored or co-authored using generative AI tooling?
'No'.
Closes #50047 from beliefer/SPARK-51292.
Authored-by: beliefer <[email protected]>
Signed-off-by: beliefer <[email protected]>
(cherry picked from commit 30279686b63a4ea22f6664a63df1a2a957e737ef)
Signed-off-by: beliefer <[email protected]>
---
.../apache/spark/sql/catalyst/expressions/CsvExpressionsSuite.scala | 3 +--
.../apache/spark/sql/catalyst/expressions/JsonExpressionsSuite.scala | 3 +--
.../spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala | 4 +---
.../apache/spark/sql/catalyst/expressions/XmlExpressionsSuite.scala | 3 +--
.../spark/sql/catalyst/optimizer/NullDownPropagationSuite.scala | 2 +-
.../apache/spark/sql/catalyst/optimizer/OptimizeCsvExprsSuite.scala | 2 +-
.../spark/sql/catalyst/optimizer/PushFoldableIntoBranchesSuite.scala | 2 +-
sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala | 3 +--
8 files changed, 8 insertions(+), 14 deletions(-)
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CsvExpressionsSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CsvExpressionsSuite.scala
index 81dd8242c600..3e1f4a703483 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CsvExpressionsSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CsvExpressionsSuite.scala
@@ -25,13 +25,12 @@ import org.scalatest.exceptions.TestFailedException
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.plans.PlanTestBase
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.{PST, UTC_OPT}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
-class CsvExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper with
PlanTestBase {
+class CsvExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
val badCsv = "\u0000\u0000\u0000A\u0001AAA"
test("from_csv") {
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/JsonExpressionsSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/JsonExpressionsSuite.scala
index 0ec1a93b5cd2..467d0fc36032 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/JsonExpressionsSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/JsonExpressionsSuite.scala
@@ -28,13 +28,12 @@ import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
import org.apache.spark.sql.catalyst.expressions.Cast._
import
org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection
-import org.apache.spark.sql.catalyst.plans.PlanTestBase
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.{PST, UTC, UTC_OPT}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
-class JsonExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
with PlanTestBase {
+class JsonExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
val json =
"""
|{"store":{"fruit":[{"weight":8,"type":"apple"},{"weight":9,"type":"pear"}],
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala
index 790aa94b5840..1fbdcd97a346 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala
@@ -25,15 +25,13 @@ import org.scalatest.matchers.should.Matchers._
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.plans.PlanTestBase
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.types.{IntegerType, LongType, _}
import org.apache.spark.unsafe.array.ByteArrayMethods
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
import org.apache.spark.util.ArrayImplicits._
-class UnsafeRowConverterSuite extends SparkFunSuite with Matchers with
PlanTestBase
- with ExpressionEvalHelper {
+class UnsafeRowConverterSuite extends SparkFunSuite with Matchers with
ExpressionEvalHelper {
private def roundedSize(size: Int) =
ByteArrayMethods.roundNumberOfBytesToNearestWord(size)
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/XmlExpressionsSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/XmlExpressionsSuite.scala
index 4f38cd0630f2..a56d5b22e94f 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/XmlExpressionsSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/XmlExpressionsSuite.scala
@@ -26,13 +26,12 @@ import org.apache.spark.{SparkException, SparkFunSuite}
import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.catalyst.InternalRow
import
org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection
-import org.apache.spark.sql.catalyst.plans.PlanTestBase
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.{PST, UTC, UTC_OPT}
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
-class XmlExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper with
PlanTestBase {
+class XmlExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
test("from_xml escaping") {
val schema = StructType(StructField("\"quote", IntegerType) :: Nil)
GenerateUnsafeProjection.generate(
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/NullDownPropagationSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/NullDownPropagationSuite.scala
index 6b5e6d8a7918..52dd44144b20 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/NullDownPropagationSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/NullDownPropagationSuite.scala
@@ -25,7 +25,7 @@ import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules._
-class NullDownPropagationSuite extends PlanTest with ExpressionEvalHelper {
+class NullDownPropagationSuite extends PlanTest {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeCsvExprsSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeCsvExprsSuite.scala
index c5e5c81bf461..6d4ace46785c 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeCsvExprsSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeCsvExprsSuite.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.rules.RuleExecutor
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
-class OptimizeCsvExprsSuite extends PlanTest with ExpressionEvalHelper {
+class OptimizeCsvExprsSuite extends PlanTest {
private var csvExpressionOptimizeEnabled: Boolean = _
protected override def beforeAll(): Unit = {
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/PushFoldableIntoBranchesSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/PushFoldableIntoBranchesSuite.scala
index bdeb192fc121..d88db82d94dd 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/PushFoldableIntoBranchesSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/PushFoldableIntoBranchesSuite.scala
@@ -32,7 +32,7 @@ import org.apache.spark.sql.types.{BooleanType, IntegerType,
StringType, Timesta
import org.apache.spark.unsafe.types.CalendarInterval
-class PushFoldableIntoBranchesSuite extends PlanTest with ExpressionEvalHelper
{
+class PushFoldableIntoBranchesSuite extends PlanTest {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches = Batch("PushFoldableIntoBranches", FixedPoint(50),
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala
index bb1363f1c58c..9031a0868687 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala
@@ -21,13 +21,12 @@ import java.time.{Instant, LocalDate, LocalDateTime, ZoneId}
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.catalyst.parser.ParseException
-import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical.Limit
import org.apache.spark.sql.functions.{array, call_function, lit, map,
map_from_arrays, map_from_entries, str_to_map, struct}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
-class ParametersSuite extends QueryTest with SharedSparkSession with PlanTest {
+class ParametersSuite extends QueryTest with SharedSparkSession {
test("bind named parameters") {
val sqlText =
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]