This is an automated email from the ASF dual-hosted git repository.
wangzhen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new e779b424d [KYUUBI #5816] Change spark rule class to object or case
class
e779b424d is described below
commit e779b424df4508ad2872619aaa87a6516f89f354
Author: zml1206 <[email protected]>
AuthorDate: Wed Dec 6 11:00:33 2023 +0800
[KYUUBI #5816] Change spark rule class to object or case class
# :mag: Description
## Issue References 🔗
This pull request fixes #5816
## Describe Your Solution 🔧
## Types of changes :bookmark:
- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing
functionality to change)
## Test Plan 🧪
#### Behavior Without This Pull Request :coffin:
#### Behavior With This Pull Request :tada:
#### Related Unit Tests
---
# Checklists
## 📝 Author Self Checklist
- [ ] My code follows the [style
guidelines](https://kyuubi.readthedocs.io/en/master/contributing/code/style.html)
of this project
- [ ] I have performed a self-review
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have made corresponding changes to the documentation
- [ ] My changes generate no new warnings
- [ ] I have added tests that prove my fix is effective or that my feature
works
- [ ] New and existing unit tests pass locally with my changes
- [ ] This patch was not authored or co-authored using [Generative
Tooling](https://www.apache.org/legal/generative-tooling.html)
## 📝 Committer Pre-Merge Checklist
- [ ] Pull request title is okay.
- [ ] No license issues.
- [ ] Milestone correctly set?
- [ ] Test coverage is ok
- [ ] Assignees are selected.
- [ ] Minimum number of approvals
- [ ] No changes are requested
**Be nice. Be informative.**
Closes #5817 from zml1206/KYUUBI-5816.
Closes #5816
437dd1f27 [zml1206] Change spark rule class to object or case class
Authored-by: zml1206 <[email protected]>
Signed-off-by: wforget <[email protected]>
---
.../kyuubi/sql/KyuubiSparkSQLCommonExtension.scala | 2 +-
.../org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala | 2 +-
.../org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala | 2 +-
.../kyuubi/sql/KyuubiSparkSQLCommonExtension.scala | 2 +-
.../org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala | 2 +-
.../org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala | 2 +-
.../sql/watchdog/KyuubiUnsupportedOperationsCheck.scala | 2 +-
.../org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala | 2 +-
.../sql/watchdog/KyuubiUnsupportedOperationsCheck.scala | 2 +-
.../sql/watchdog/KyuubiUnsupportedOperationsCheck.scala | 2 +-
.../plugin/spark/authz/ranger/RangerSparkExtension.scala | 16 ++++++++--------
.../plugin/spark/authz/ranger/RuleAuthorization.scala | 2 +-
.../plugin/spark/authz/rule/RuleEliminateMarker.scala | 2 +-
.../authz/rule/RuleEliminatePermanentViewMarker.scala | 2 +-
.../plugin/spark/authz/rule/RuleEliminateTypeOf.scala | 2 +-
.../authz/rule/expression/RuleApplyTypeOfMarker.scala | 2 +-
.../permanentview/RuleApplyPermanentViewMarker.scala | 2 +-
.../rule/rowfilter/FilterDataSourceV2Strategy.scala | 2 +-
.../rule/rowfilter/RuleReplaceShowObjectCommands.scala | 2 +-
19 files changed, 26 insertions(+), 26 deletions(-)
diff --git
a/extensions/spark/kyuubi-extension-spark-3-1/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLCommonExtension.scala
b/extensions/spark/kyuubi-extension-spark-3-1/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLCommonExtension.scala
index 62aa88b98..3dda669a8 100644
---
a/extensions/spark/kyuubi-extension-spark-3-1/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLCommonExtension.scala
+++
b/extensions/spark/kyuubi-extension-spark-3-1/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLCommonExtension.scala
@@ -44,7 +44,7 @@ object KyuubiSparkSQLCommonExtension {
extensions.injectQueryStagePrepRule(_ => InsertShuffleNodeBeforeJoin)
- extensions.injectPostHocResolutionRule(session =>
MarkNumOutputColumnsRule(session))
+ extensions.injectPostHocResolutionRule(MarkNumOutputColumnsRule(_))
extensions.injectQueryStagePrepRule(FinalStageConfigIsolation(_))
}
}
diff --git
a/extensions/spark/kyuubi-extension-spark-3-1/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
b/extensions/spark/kyuubi-extension-spark-3-1/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
index 31765349b..f61eb731e 100644
---
a/extensions/spark/kyuubi-extension-spark-3-1/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
+++
b/extensions/spark/kyuubi-extension-spark-3-1/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
@@ -39,7 +39,7 @@ class KyuubiSparkSQLExtension extends (SparkSessionExtensions
=> Unit) {
extensions.injectPostHocResolutionRule(DropIgnoreNonexistent)
// watchdog extension
- extensions.injectCheckRule(_ => new KyuubiUnsupportedOperationsCheck)
+ extensions.injectCheckRule(_ => KyuubiUnsupportedOperationsCheck)
extensions.injectOptimizerRule(ForcedMaxOutputRowsRule)
extensions.injectPlannerStrategy(MaxScanStrategy)
}
diff --git
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
b/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
index 4d1184e33..9a0f5b1bb 100644
---
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
+++
b/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
@@ -37,7 +37,7 @@ class KyuubiSparkSQLExtension extends (SparkSessionExtensions
=> Unit) {
extensions.injectPostHocResolutionRule(DropIgnoreNonexistent)
// watchdog extension
- extensions.injectCheckRule(_ => new KyuubiUnsupportedOperationsCheck)
+ extensions.injectCheckRule(_ => KyuubiUnsupportedOperationsCheck)
extensions.injectOptimizerRule(ForcedMaxOutputRowsRule)
extensions.injectPlannerStrategy(MaxScanStrategy)
}
diff --git
a/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLCommonExtension.scala
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLCommonExtension.scala
index 170b5a165..c001ffc6c 100644
---
a/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLCommonExtension.scala
+++
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLCommonExtension.scala
@@ -44,7 +44,7 @@ object KyuubiSparkSQLCommonExtension {
extensions.injectQueryStagePrepRule(_ => InsertShuffleNodeBeforeJoin)
- extensions.injectPostHocResolutionRule(session =>
MarkNumOutputColumnsRule(session))
+ extensions.injectPostHocResolutionRule(MarkNumOutputColumnsRule(_))
extensions.injectQueryStagePrepRule(FinalStageConfigIsolation(_))
}
}
diff --git
a/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
index 038190953..fd11fb5f5 100644
---
a/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
+++
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
@@ -37,7 +37,7 @@ class KyuubiSparkSQLExtension extends (SparkSessionExtensions
=> Unit) {
extensions.injectPostHocResolutionRule(DropIgnoreNonexistent)
// watchdog extension
- extensions.injectCheckRule(_ => new KyuubiUnsupportedOperationsCheck)
+ extensions.injectCheckRule(_ => KyuubiUnsupportedOperationsCheck)
extensions.injectOptimizerRule(ForcedMaxOutputRowsRule)
extensions.injectPlannerStrategy(MaxScanStrategy)
diff --git
a/extensions/spark/kyuubi-extension-spark-3-4/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
b/extensions/spark/kyuubi-extension-spark-3-4/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
index 038190953..fd11fb5f5 100644
---
a/extensions/spark/kyuubi-extension-spark-3-4/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
+++
b/extensions/spark/kyuubi-extension-spark-3-4/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
@@ -37,7 +37,7 @@ class KyuubiSparkSQLExtension extends (SparkSessionExtensions
=> Unit) {
extensions.injectPostHocResolutionRule(DropIgnoreNonexistent)
// watchdog extension
- extensions.injectCheckRule(_ => new KyuubiUnsupportedOperationsCheck)
+ extensions.injectCheckRule(_ => KyuubiUnsupportedOperationsCheck)
extensions.injectOptimizerRule(ForcedMaxOutputRowsRule)
extensions.injectPlannerStrategy(MaxScanStrategy)
diff --git
a/extensions/spark/kyuubi-extension-spark-3-4/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
b/extensions/spark/kyuubi-extension-spark-3-4/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
index 6f6ecb3ce..2b4d3940a 100644
---
a/extensions/spark/kyuubi-extension-spark-3-4/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
+++
b/extensions/spark/kyuubi-extension-spark-3-4/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
@@ -22,7 +22,7 @@ import
org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, ScriptTransform
import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}
-class KyuubiUnsupportedOperationsCheck extends (LogicalPlan => Unit) with
SQLConfHelper {
+object KyuubiUnsupportedOperationsCheck extends (LogicalPlan => Unit) with
SQLConfHelper {
override def apply(plan: LogicalPlan): Unit =
conf.getConf(KyuubiSQLConf.SCRIPT_TRANSFORMATION_ENABLED) match {
case false => plan foreach {
diff --git
a/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
b/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
index 038190953..fd11fb5f5 100644
---
a/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
+++
b/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
@@ -37,7 +37,7 @@ class KyuubiSparkSQLExtension extends (SparkSessionExtensions
=> Unit) {
extensions.injectPostHocResolutionRule(DropIgnoreNonexistent)
// watchdog extension
- extensions.injectCheckRule(_ => new KyuubiUnsupportedOperationsCheck)
+ extensions.injectCheckRule(_ => KyuubiUnsupportedOperationsCheck)
extensions.injectOptimizerRule(ForcedMaxOutputRowsRule)
extensions.injectPlannerStrategy(MaxScanStrategy)
diff --git
a/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
b/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
index 6f6ecb3ce..2b4d3940a 100644
---
a/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
+++
b/extensions/spark/kyuubi-extension-spark-3-5/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
@@ -22,7 +22,7 @@ import
org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, ScriptTransform
import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}
-class KyuubiUnsupportedOperationsCheck extends (LogicalPlan => Unit) with
SQLConfHelper {
+object KyuubiUnsupportedOperationsCheck extends (LogicalPlan => Unit) with
SQLConfHelper {
override def apply(plan: LogicalPlan): Unit =
conf.getConf(KyuubiSQLConf.SCRIPT_TRANSFORMATION_ENABLED) match {
case false => plan foreach {
diff --git
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
b/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
index 6f6ecb3ce..2b4d3940a 100644
---
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
+++
b/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
@@ -22,7 +22,7 @@ import
org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, ScriptTransform
import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}
-class KyuubiUnsupportedOperationsCheck extends (LogicalPlan => Unit) with
SQLConfHelper {
+object KyuubiUnsupportedOperationsCheck extends (LogicalPlan => Unit) with
SQLConfHelper {
override def apply(plan: LogicalPlan): Unit =
conf.getConf(KyuubiSQLConf.SCRIPT_TRANSFORMATION_ENABLED) match {
case false => plan foreach {
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtension.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtension.scala
index 93c10068a..288719f07 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtension.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtension.scala
@@ -45,16 +45,16 @@ class RangerSparkExtension extends (SparkSessionExtensions
=> Unit) {
override def apply(v1: SparkSessionExtensions): Unit = {
v1.injectCheckRule(AuthzConfigurationChecker)
- v1.injectResolutionRule(_ => new RuleReplaceShowObjectCommands())
- v1.injectResolutionRule(_ => new RuleApplyPermanentViewMarker())
- v1.injectResolutionRule(_ => new RuleApplyTypeOfMarker())
+ v1.injectResolutionRule(_ => RuleReplaceShowObjectCommands)
+ v1.injectResolutionRule(_ => RuleApplyPermanentViewMarker)
+ v1.injectResolutionRule(_ => RuleApplyTypeOfMarker)
v1.injectResolutionRule(RuleApplyRowFilter)
v1.injectResolutionRule(RuleApplyDataMaskingStage0)
v1.injectResolutionRule(RuleApplyDataMaskingStage1)
- v1.injectOptimizerRule(_ => new RuleEliminateMarker())
- v1.injectOptimizerRule(new RuleAuthorization(_))
- v1.injectOptimizerRule(new RuleEliminatePermanentViewMarker(_))
- v1.injectOptimizerRule(_ => new RuleEliminateTypeOf())
- v1.injectPlannerStrategy(new FilterDataSourceV2Strategy(_))
+ v1.injectOptimizerRule(_ => RuleEliminateMarker)
+ v1.injectOptimizerRule(RuleAuthorization)
+ v1.injectOptimizerRule(RuleEliminatePermanentViewMarker)
+ v1.injectOptimizerRule(_ => RuleEliminateTypeOf)
+ v1.injectPlannerStrategy(FilterDataSourceV2Strategy)
}
}
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleAuthorization.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleAuthorization.scala
index c5c39c511..e25cd2a70 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleAuthorization.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleAuthorization.scala
@@ -29,7 +29,7 @@ import
org.apache.kyuubi.plugin.spark.authz.ranger.SparkRangerAdminPlugin._
import org.apache.kyuubi.plugin.spark.authz.rule.Authorization
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
-class RuleAuthorization(spark: SparkSession) extends Authorization(spark) {
+case class RuleAuthorization(spark: SparkSession) extends Authorization(spark)
{
override def checkPrivileges(spark: SparkSession, plan: LogicalPlan): Unit =
{
val auditHandler = new SparkRangerAuditHandler
val ugi = getAuthzUgi(spark.sparkContext)
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminateMarker.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminateMarker.scala
index 3da11ad05..a3a22a5f3 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminateMarker.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminateMarker.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.rules.Rule
import
org.apache.kyuubi.plugin.spark.authz.rule.datamasking.{DataMaskingStage0Marker,
DataMaskingStage1Marker}
import org.apache.kyuubi.plugin.spark.authz.rule.rowfilter.RowFilterMarker
-class RuleEliminateMarker extends Rule[LogicalPlan] {
+object RuleEliminateMarker extends Rule[LogicalPlan] {
override def apply(plan: LogicalPlan): LogicalPlan = {
plan.transformUp { case p =>
p.transformExpressionsUp {
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminatePermanentViewMarker.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminatePermanentViewMarker.scala
index 00d78d47a..003521c72 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminatePermanentViewMarker.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminatePermanentViewMarker.scala
@@ -27,7 +27,7 @@ import
org.apache.kyuubi.plugin.spark.authz.rule.permanentview.PermanentViewMark
/**
* Transforming up [[PermanentViewMarker]]
*/
-class RuleEliminatePermanentViewMarker(sparkSession: SparkSession) extends
Rule[LogicalPlan] {
+case class RuleEliminatePermanentViewMarker(sparkSession: SparkSession)
extends Rule[LogicalPlan] {
def eliminatePVM(plan: LogicalPlan): LogicalPlan = {
plan.transformUp {
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminateTypeOf.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminateTypeOf.scala
index d474383f6..0f3ae136c 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminateTypeOf.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminateTypeOf.scala
@@ -23,7 +23,7 @@ import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.kyuubi.plugin.spark.authz.rule.expression.TypeOfPlaceHolder
-class RuleEliminateTypeOf extends Rule[LogicalPlan] {
+object RuleEliminateTypeOf extends Rule[LogicalPlan] {
override def apply(plan: LogicalPlan): LogicalPlan = {
plan.transformUp { case p =>
p.transformExpressionsUp {
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/expression/RuleApplyTypeOfMarker.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/expression/RuleApplyTypeOfMarker.scala
index 411977624..8d47c56f7 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/expression/RuleApplyTypeOfMarker.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/expression/RuleApplyTypeOfMarker.scala
@@ -21,7 +21,7 @@ import org.apache.spark.sql.catalyst.expressions.TypeOf
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule
-class RuleApplyTypeOfMarker extends Rule[LogicalPlan] {
+object RuleApplyTypeOfMarker extends Rule[LogicalPlan] {
override def apply(plan: LogicalPlan): LogicalPlan = {
plan transformAllExpressions {
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/RuleApplyPermanentViewMarker.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/RuleApplyPermanentViewMarker.scala
index fdea01490..a84ecec8c 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/RuleApplyPermanentViewMarker.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/RuleApplyPermanentViewMarker.scala
@@ -31,7 +31,7 @@ import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
* [[PermanentViewMarker]] must be transformed up later
* in
[[org.apache.kyuubi.plugin.spark.authz.rule.RuleEliminatePermanentViewMarker]]
optimizer.
*/
-class RuleApplyPermanentViewMarker extends Rule[LogicalPlan] {
+object RuleApplyPermanentViewMarker extends Rule[LogicalPlan] {
private def resolveSubqueryExpression(
plan: LogicalPlan,
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala
index 17c766555..e268ed6bc 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala
@@ -20,7 +20,7 @@ import org.apache.spark.sql.{SparkSession, Strategy}
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project}
import org.apache.spark.sql.execution.SparkPlan
-class FilterDataSourceV2Strategy(spark: SparkSession) extends Strategy {
+case class FilterDataSourceV2Strategy(spark: SparkSession) extends Strategy {
override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
// For Spark 3.1 and below, `ColumnPruning` rule will set
`ObjectFilterPlaceHolder#child` to
// `Project`
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/RuleReplaceShowObjectCommands.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/RuleReplaceShowObjectCommands.scala
index 1728234a8..06982d701 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/RuleReplaceShowObjectCommands.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/RuleReplaceShowObjectCommands.scala
@@ -29,7 +29,7 @@ import
org.apache.kyuubi.plugin.spark.authz.ranger.{AccessRequest, AccessResourc
import org.apache.kyuubi.plugin.spark.authz.util.{AuthZUtils,
WithInternalChildren}
import org.apache.kyuubi.util.reflect.ReflectUtils._
-class RuleReplaceShowObjectCommands extends Rule[LogicalPlan] {
+object RuleReplaceShowObjectCommands extends Rule[LogicalPlan] {
override def apply(plan: LogicalPlan): LogicalPlan = plan match {
case r: RunnableCommand if r.nodeName == "ShowTablesCommand" =>
FilteredShowTablesCommand(r)
case n: LogicalPlan if n.nodeName == "ShowTables" =>