This is an automated email from the ASF dual-hosted git repository.

bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 1bf71a858f [KYUUBI #7202] [AUTHZ] make FilteredShowObjectsExec extends 
V2CommandExec to avoid running executor side task
1bf71a858f is described below

commit 1bf71a858f168693e82759025470ffd1ea81a0de
Author: Lennon Chin <[email protected]>
AuthorDate: Fri Sep 19 12:26:25 2025 +0800

    [KYUUBI #7202] [AUTHZ] make FilteredShowObjectsExec extends V2CommandExec 
to avoid running executor side task
    
    Currently, FilteredShowObjectsExec is not a subclass of V2CommandExec; when it 
is executed, it invokes the `def doExecute(): RDD[InternalRow]` method to submit 
an executor-side task, which is slow and wastes resources. V2CommandExec 
implements `def executeCollect(): Array[InternalRow]` to avoid running `def 
doExecute(): RDD[InternalRow]`.
    
    ### Why are the changes needed?
    
    Improve performance.
    
    ### How was this patch tested?
    
    Existing unit tests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #7202 from LennonChin/authz_filtered_show_objects.
    
    Closes #7202
    
    10b55e661 [Lennon Chin] [AUTHZ] make FilteredShowObjectsExec extends 
V2CommandExec to avoid running executor side task
    
    Authored-by: Lennon Chin <[email protected]>
    Signed-off-by: Bowen Liang <[email protected]>
---
 .../rule/rowfilter/FilteredShowObjectsExec.scala     | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilteredShowObjectsExec.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilteredShowObjectsExec.scala
index 0bb4213561..fd617161bb 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilteredShowObjectsExec.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilteredShowObjectsExec.scala
@@ -18,23 +18,17 @@ package org.apache.kyuubi.plugin.spark.authz.rule.rowfilter
 
 import org.apache.hadoop.security.UserGroupInformation
 import org.apache.spark.SparkContext
-import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.Attribute
 import org.apache.spark.sql.execution.{LeafExecNode, SparkPlan}
+import org.apache.spark.sql.execution.datasources.v2.V2CommandExec
 
 import org.apache.kyuubi.plugin.spark.authz.{ObjectType, OperationType}
 import org.apache.kyuubi.plugin.spark.authz.ranger.{AccessRequest, 
AccessResource, AccessType, SparkRangerAdminPlugin}
 import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils
 
-trait FilteredShowObjectsExec extends LeafExecNode {
+trait FilteredShowObjectsExec extends V2CommandExec with LeafExecNode {
   def result: Array[InternalRow]
-
-  override def output: Seq[Attribute]
-
-  final override def doExecute(): RDD[InternalRow] = {
-    sparkContext.parallelize(result, 1)
-  }
 }
 
 trait FilteredShowObjectsCheck {
@@ -42,7 +36,10 @@ trait FilteredShowObjectsCheck {
 }
 
 case class FilteredShowNamespaceExec(result: Array[InternalRow], output: 
Seq[Attribute])
-  extends FilteredShowObjectsExec {}
+  extends FilteredShowObjectsExec {
+  override protected def run(): Seq[InternalRow] = result
+}
+
 object FilteredShowNamespaceExec extends FilteredShowObjectsCheck {
   def apply(delegated: SparkPlan, sc: SparkContext): FilteredShowNamespaceExec 
= {
     val result = delegated.executeCollect()
@@ -60,7 +57,10 @@ object FilteredShowNamespaceExec extends 
FilteredShowObjectsCheck {
 }
 
 case class FilteredShowTablesExec(result: Array[InternalRow], output: 
Seq[Attribute])
-  extends FilteredShowObjectsExec {}
+  extends FilteredShowObjectsExec {
+  override protected def run(): Seq[InternalRow] = result
+}
+
 object FilteredShowTablesExec extends FilteredShowObjectsCheck {
   def apply(delegated: SparkPlan, sc: SparkContext): FilteredShowNamespaceExec 
= {
     val result = delegated.executeCollect()

Reply via email to