This is an automated email from the ASF dual-hosted git repository.

bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 8f6183563 [KYUUBI #4903] [AUTHZ] Fix NoSuchElementException when listing database in CatalogImpl in Spark 3.4
8f6183563 is described below

commit 8f61835630173684e23027f8a789bb6b531476e3
Author: zhouyifan279 <[email protected]>
AuthorDate: Wed May 31 13:05:15 2023 +0800

    [KYUUBI #4903] [AUTHZ] Fix NoSuchElementException when listing database in CatalogImpl in Spark 3.4
    
    ### _Why are the changes needed?_
    Fix #4902
    
    We changed `ObjectFilterPlaceHolder` to extend `UnaryNode` so that `CatalogImpl#listDatabases()` can find the `ShowNamespaces` node in the LogicalPlan.
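    
    Below is a minimal, self-contained sketch of why the old `LeafNode`-based placeholder broke `CatalogImpl#listDatabases()`. It is illustration only, not Kyuubi or Catalyst code: `Node`, `LeafPlaceHolder`, `UnaryPlaceHolder`, and `collectShow` are all hypothetical stand-ins. Tree traversals only descend through `children`; a leaf reports none, so a lookup ending in `.head`/`.get` throws `NoSuchElementException`:
    
    ```scala
    // Hypothetical stand-in for Catalyst's TreeNode: traversal only descends
    // through `children`.
    sealed trait Node {
      def children: Seq[Node]
      def nodeName: String = getClass.getSimpleName
      def collectShow: Option[Node] =
        if (nodeName == "ShowNamespaces") Some(this)
        else children.flatMap(_.collectShow).headOption
    }
    case class ShowNamespaces() extends Node { val children: Seq[Node] = Nil }
    // Old shape: a LeafNode-like placeholder hides the plan it wraps.
    case class LeafPlaceHolder(hidden: Node) extends Node { val children: Seq[Node] = Nil }
    // New shape: a UnaryNode-like placeholder exposes its child to traversal.
    case class UnaryPlaceHolder(child: Node) extends Node { val children: Seq[Node] = Seq(child) }
    
    // LeafPlaceHolder(ShowNamespaces()).collectShow  => None; a `.get` here throws
    // UnaryPlaceHolder(ShowNamespaces()).collectShow => Some(ShowNamespaces())
    ```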
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests) locally before making a pull request
    
    Closes #4903 from zhouyifan279/ShowNamespaces.
    
    Closes #4903
    
    8bf3e1391 [zhouyifan279] [KYUUBI#4902] Fix NoSuchElementException when listing database in CatalogImpl in Spark 3.4
    8698b4a48 [zhouyifan279] [KYUUBI#4902] Fix NoSuchElementException when listing database in CatalogImpl in Spark 3.4
    a9ad36051 [zhouyifan279] [KYUUBI#4902] Fix NoSuchElementException when listing database in CatalogImpl in Spark 3.4
    78d3d6336 [zhouyifan279] [KYUUBI#4902] Fix NoSuchElementException when listing database in CatalogImpl in Spark 3.4
    
    Authored-by: zhouyifan279 <[email protected]>
    Signed-off-by: liangbowen <[email protected]>
---
 .../spark/authz/ranger/FilterDataSourceV2Strategy.scala  |  9 ++++++++-
 .../spark/authz/util/ObjectFilterPlaceHolder.scala       | 16 +++++++++++++---
 2 files changed, 21 insertions(+), 4 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/FilterDataSourceV2Strategy.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/FilterDataSourceV2Strategy.scala
index d39aacdcf..cbf79581e 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/FilterDataSourceV2Strategy.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/FilterDataSourceV2Strategy.scala
@@ -17,13 +17,20 @@
 package org.apache.kyuubi.plugin.spark.authz.ranger
 
 import org.apache.spark.sql.{SparkSession, Strategy}
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project}
 import org.apache.spark.sql.execution.SparkPlan
 
 import org.apache.kyuubi.plugin.spark.authz.util.ObjectFilterPlaceHolder
 
 class FilterDataSourceV2Strategy(spark: SparkSession) extends Strategy {
   override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
+    // For Spark 3.1 and below, `ColumnPruning` rule will set `ObjectFilterPlaceHolder#child` to
+    // `Project`
+    case ObjectFilterPlaceHolder(Project(_, child)) if child.nodeName == "ShowNamespaces" =>
+      spark.sessionState.planner.plan(child)
+        .map(FilteredShowNamespaceExec(_, spark.sparkContext)).toSeq
+
+    // For Spark 3.2 and above
     case ObjectFilterPlaceHolder(child) if child.nodeName == "ShowNamespaces" =>
       spark.sessionState.planner.plan(child)
         .map(FilteredShowNamespaceExec(_, spark.sparkContext)).toSeq
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/ObjectFilterPlaceHolder.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/ObjectFilterPlaceHolder.scala
index a5d1c0d3b..0d3c39adb 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/ObjectFilterPlaceHolder.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/util/ObjectFilterPlaceHolder.scala
@@ -18,9 +18,19 @@
 package org.apache.kyuubi.plugin.spark.authz.util
 
 import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Statistics}
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, UnaryNode}
+
+case class ObjectFilterPlaceHolder(child: LogicalPlan) extends UnaryNode
+  with WithInternalChild {
 
-case class ObjectFilterPlaceHolder(child: LogicalPlan) extends LeafNode {
   override def output: Seq[Attribute] = child.output
-  override def computeStats(): Statistics = child.stats
+
+  override def withNewChildInternal(newChild: LogicalPlan): LogicalPlan = {
+    // `FilterDataSourceV2Strategy` requires that child.nodeName not be changed
+    if (child.nodeName == newChild.nodeName) {
+      copy(newChild)
+    } else {
+      this
+    }
+  }
 }
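
With this change applied, listing databases through the session catalog works again on Spark 3.4. A rough reproduction sketch, not a definitive test (assumptions: a local Spark 3.4 session with the Kyuubi Spark Authz artifacts on the classpath and a valid Ranger client configuration; before this fix the `listDatabases()` call failed with NoSuchElementException):

    import org.apache.spark.sql.SparkSession

    object ListDatabasesRepro extends App {
      val spark = SparkSession.builder()
        .master("local[1]")
        // The Authz extension wraps commands such as ShowNamespaces in
        // ObjectFilterPlaceHolder to filter their results by privilege.
        .config("spark.sql.extensions",
          "org.apache.kyuubi.plugin.spark.authz.ranger.RangerSparkExtension")
        .getOrCreate()

      // CatalogImpl#listDatabases traverses the analyzed plan looking for the
      // ShowNamespaces node; with the old LeafNode placeholder it was unreachable.
      spark.catalog.listDatabases().show()

      spark.stop()
    }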
