This is an automated email from the ASF dual-hosted git repository.
beliefer pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 9a1f921ae19d [SPARK-51258][SQL][FOLLOWUP] Remove unnecessary
inheritance from SQLConfHelper
9a1f921ae19d is described below
commit 9a1f921ae19d31278baae2f023306749e6b54daa
Author: beliefer <[email protected]>
AuthorDate: Sun Feb 23 17:30:46 2025 +0800
[SPARK-51258][SQL][FOLLOWUP] Remove unnecessary inheritance from
SQLConfHelper
### What changes were proposed in this pull request?
This PR proposes to remove unnecessary inheritance from `SQLConfHelper`.
### Why are the changes needed?
1. Some traits already extend `SQLConfHelper`, so we should avoid
duplicated inheritance.
```
trait TreeNodeResolver[UnresolvedNode <: TreeNode[_], ResolvedNode <:
TreeNode[_]]
extends SQLConfHelper
with QueryErrorsBase {
def resolve(unresolvedNode: UnresolvedNode): ResolvedNode
}
```
```
trait SQLHelper extends SQLConfHelper {
...
}
```
```
trait PlanTestBase extends PredicateHelper with SQLHelper { self: Suite =>
..
}
trait PlanTest extends SparkFunSuite with PlanTestBase
```
2. `V2SessionCatalog` already mixes in `SQLConfHelper`, so we should use `conf`
directly.
### Does this PR introduce _any_ user-facing change?
'No'.
Just update the inner code.
### How was this patch tested?
GA.
### Was this patch authored or co-authored using generative AI tooling?
'No'.
Closes #50046 from beliefer/SPARK-51258_followup.
Authored-by: beliefer <[email protected]>
Signed-off-by: beliefer <[email protected]>
---
.../catalyst/analysis/resolver/ConditionalExpressionResolver.scala | 4 +---
.../src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala | 3 +--
.../apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala | 4 ++--
.../scala/org/apache/spark/sql/analysis/resolver/NameScopeSuite.scala | 3 +--
4 files changed, 5 insertions(+), 9 deletions(-)
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ConditionalExpressionResolver.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ConditionalExpressionResolver.scala
index 548c824b24f5..4532965c6c68 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ConditionalExpressionResolver.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ConditionalExpressionResolver.scala
@@ -17,7 +17,6 @@
package org.apache.spark.sql.catalyst.analysis.resolver
-import org.apache.spark.sql.catalyst.SQLConfHelper
import org.apache.spark.sql.catalyst.analysis.{AnsiTypeCoercion, TypeCoercion}
import org.apache.spark.sql.catalyst.expressions.{ConditionalExpression,
Expression}
@@ -28,8 +27,7 @@ class ConditionalExpressionResolver(
expressionResolver: ExpressionResolver,
timezoneAwareExpressionResolver: TimezoneAwareExpressionResolver)
extends TreeNodeResolver[ConditionalExpression, Expression]
- with ResolvesExpressionChildren
- with SQLConfHelper {
+ with ResolvesExpressionChildren {
private val typeCoercionTransformations: Seq[Expression => Expression] =
if (conf.ansiEnabled) {
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
index 37baa66049de..f06e6ed137cc 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/PlanTest.scala
@@ -22,7 +22,6 @@ import org.scalatest.Suite
import org.scalatest.Tag
import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.catalyst.SQLConfHelper
import org.apache.spark.sql.catalyst.analysis.SimpleAnalyzer
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.CodegenObjectFactoryMode
@@ -62,7 +61,7 @@ trait CodegenInterpretedPlanTest extends PlanTest {
* Provides helper methods for comparing plans, but without the overhead of
* mandating a FunSuite.
*/
-trait PlanTestBase extends PredicateHelper with SQLHelper with SQLConfHelper {
self: Suite =>
+trait PlanTestBase extends PredicateHelper with SQLHelper { self: Suite =>
protected def normalizeExprIds(plan: LogicalPlan): LogicalPlan =
NormalizePlan.normalizeExprIds(plan)
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
index 0a533645648e..e067730cfdf5 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
@@ -48,7 +48,7 @@ class V2SessionCatalog(catalog: SessionCatalog)
extends TableCatalog with FunctionCatalog with SupportsNamespaces with
SQLConfHelper {
import V2SessionCatalog._
- override val defaultNamespace: Array[String] =
Array(SQLConf.get.defaultDatabase)
+ override val defaultNamespace: Array[String] = Array(conf.defaultDatabase)
override def name: String = CatalogManager.SESSION_CATALOG_NAME
@@ -83,7 +83,7 @@ class V2SessionCatalog(catalog: SessionCatalog)
}
private def hasCustomSessionCatalog: Boolean = {
- catalog.conf.getConf(SQLConf.V2_SESSION_CATALOG_IMPLEMENTATION) !=
"builtin"
+ conf.getConf(SQLConf.V2_SESSION_CATALOG_IMPLEMENTATION) != "builtin"
}
override def loadTable(ident: Identifier): Table = {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/analysis/resolver/NameScopeSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/analysis/resolver/NameScopeSuite.scala
index ecf9ca3e8d21..3b950e99281a 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/analysis/resolver/NameScopeSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/analysis/resolver/NameScopeSuite.scala
@@ -18,7 +18,6 @@
package org.apache.spark.sql.analysis.resolver
import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.SQLConfHelper
import org.apache.spark.sql.catalyst.analysis.UnresolvedStar
import org.apache.spark.sql.catalyst.analysis.resolver.{NameScope,
NameScopeStack, NameTarget}
import org.apache.spark.sql.catalyst.expressions.{
@@ -41,7 +40,7 @@ import org.apache.spark.sql.types.{
StructType
}
-class NameScopeSuite extends PlanTest with SQLConfHelper {
+class NameScopeSuite extends PlanTest {
private val col1Integer = AttributeReference(name = "col1", dataType =
IntegerType)()
private val col1IntegerOther = AttributeReference(name = "col1", dataType =
IntegerType)()
private val col2Integer = AttributeReference(name = "col2", dataType =
IntegerType)()
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]