This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 44d194dc4 [KYUUBI #5793][AUTHZ][BUG] PVM with nested scalar-subquery should not check src table privilege
44d194dc4 is described below

commit 44d194dc40272d0806f60b6edfd497a7f3bf0555
Author: Angerszhuuuu <[email protected]>
AuthorDate: Fri Dec 1 09:50:35 2023 +0800

    [KYUUBI #5793][AUTHZ][BUG] PVM with nested scalar-subquery should not check src table privilege
    
    # :mag: Description
    ## Issue References 🔗
    
    This pull request fixes #5793
    
    ## Describe Your Solution 🔧
    For SQL with nested scalar subqueries, a scalar subquery inside another scalar subquery was not wrapped by `PVM`; this PR fixes that.
    Note: this bug was not introduced by #5780.
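    
    To illustrate the shape of the fix, here is a minimal sketch of the recursive wrapping (the object and method names below are illustrative placeholders; the actual helper is `resolveSubqueryExpression` in `RuleApplyPermanentViewMarker`, shown in the diff):
    
    ```
    import org.apache.spark.sql.catalyst.catalog.CatalogTable
    import org.apache.spark.sql.catalyst.expressions.SubqueryExpression
    import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
    
    import org.apache.kyuubi.plugin.spark.authz.rule.permanentview.PermanentViewMarker
    
    // Hypothetical standalone holder for this sketch; in the patch the logic lives
    // inside RuleApplyPermanentViewMarker.
    object NestedSubqueryWrapping {
      // Wrap the plan of every SubqueryExpression, at any nesting depth, in a
      // PermanentViewMarker so privilege checks stop at the view itself and do not
      // descend into the view's source tables.
      def wrapNestedSubqueries(plan: LogicalPlan, viewDesc: CatalogTable): LogicalPlan = {
        plan.transformAllExpressions {
          case subquery: SubqueryExpression =>
            subquery.withNewPlan(PermanentViewMarker(
              wrapNestedSubqueries(subquery.plan, viewDesc), // recurse into nested subqueries
              viewDesc))
        }
      }
    }
    ```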
    
    ## Types of changes :bookmark:
    
    - [x] Bugfix (non-breaking change which fixes an issue)
    - [ ] New feature (non-breaking change which adds functionality)
    - [ ] Breaking change (fix or feature that would cause existing functionality to change)
    
    ## Test Plan 🧪
    
    #### Behavior Without This Pull Request :coffin:
    ```
    CREATE VIEW $db1.$view1
    AS
    SELECT id, name, max(scope) as max_scope, sum(age) sum_age
    FROM $db1.$table2
    WHERE scope in (
        SELECT max(scope) max_scope
        FROM $db1.$table1
        WHERE id IN (SELECT id FROM $db1.$table3)
    )
    GROUP BY id, name
    ```
    
    When we query `$db1.$view1` with only `view1`'s privilege, it throws:
    ```
    Permission denied: user [user_perm_view_only] does not have [select] privilege on [default/table3/id]
    org.apache.kyuubi.plugin.spark.authz.AccessControlException: Permission denied: user [user_perm_view_only] does not have [select] privilege on [default/table3/id]
       at org.apache.kyuubi.plugin.spark.authz.ranger.SparkRangerAdminPlugin$.verify(SparkRangerAdminPlugin.scala:167)
    ```
    
    #### Behavior With This Pull Request :tada:
    Querying the view no longer requires `table3`'s privilege.
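    
    The added regression test exercises exactly this: with only view-level privilege, reading the view succeeds and returns the expected (empty) result. A trimmed sketch using the suite's existing helpers (`checkAnswer`, `permViewOnlyUser`, and `defaultDb` all come from `RangerSparkExtensionSuite`):
    
    ```
    // Inside RangerSparkExtensionSuite: reading the view with only view-level
    // SELECT privilege no longer triggers a privilege check on the nested
    // source table table3.
    val db1 = defaultDb
    val view1 = "perm_view"
    checkAnswer(permViewOnlyUser, s"SELECT * FROM $db1.$view1", Array.empty[Row])
    ```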
    
    #### Related Unit Tests
    
    - Added a regression test in `RangerSparkExtensionSuite`: "[KYUUBI #5793][BUG] PVM with nested scala-subquery should not src table privilege" (see the diff below).
    
    ---
    
    # Checklists
    ## ๐Ÿ“ Author Self Checklist
    
    - [x] My code follows the [style guidelines](https://kyuubi.readthedocs.io/en/master/contributing/code/style.html) of this project
    - [x] I have performed a self-review
    - [x] I have commented my code, particularly in hard-to-understand areas
    - [ ] I have made corresponding changes to the documentation
    - [x] My changes generate no new warnings
    - [x] I have added tests that prove my fix is effective or that my feature works
    - [x] New and existing unit tests pass locally with my changes
    - [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)
    
    ## ๐Ÿ“ Committer Pre-Merge Checklist
    
    - [x] Pull request title is okay.
    - [x] No license issues.
    - [x] Milestone correctly set?
    - [x] Test coverage is ok
    - [x] Assignees are selected.
    - [x] Minimum number of approvals
    - [x] No changes are requested
    
    **Be nice. Be informative.**
    
    Closes #5796 from AngersZhuuuu/KYUUBI-5793.
    
    Closes #5793
    
    0f5ebc14a [Angerszhuuuu] Update RuleEliminatePermanentViewMarker.scala
    f364d892b [Angerszhuuuu] [KYUUBI #5793][BUG] PVM with nested scala-subquery should not src table privilege"
    
    Authored-by: Angerszhuuuu <[email protected]>
    Signed-off-by: Kent Yao <[email protected]>
---
 .../rule/RuleEliminatePermanentViewMarker.scala    | 16 +++++++-
 .../RuleApplyPermanentViewMarker.scala             | 20 +++++++---
 .../authz/ranger/RangerSparkExtensionSuite.scala   | 46 ++++++++++++++++++++++
 3 files changed, 76 insertions(+), 6 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminatePermanentViewMarker.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminatePermanentViewMarker.scala
index d468dcca6..00d78d47a 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminatePermanentViewMarker.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/RuleEliminatePermanentViewMarker.scala
@@ -28,13 +28,27 @@ import org.apache.kyuubi.plugin.spark.authz.rule.permanentview.PermanentViewMark
  * Transforming up [[PermanentViewMarker]]
  */
 class RuleEliminatePermanentViewMarker(sparkSession: SparkSession) extends Rule[LogicalPlan] {
+
+  def eliminatePVM(plan: LogicalPlan): LogicalPlan = {
+    plan.transformUp {
+      case pvm: PermanentViewMarker =>
+        val ret = pvm.child.transformAllExpressions {
+          case s: SubqueryExpression => s.withNewPlan(eliminatePVM(s.plan))
+        }
+        // For each SubqueryExpression's PVM, we should mark as resolved to
+        // avoid check privilege of PVM's internal Subquery.
+        Authorization.markAuthChecked(ret)
+        ret
+    }
+  }
+
   override def apply(plan: LogicalPlan): LogicalPlan = {
     var matched = false
     val eliminatedPVM = plan.transformUp {
       case pvm: PermanentViewMarker =>
         matched = true
         pvm.child.transformAllExpressions {
-          case s: SubqueryExpression => s.withNewPlan(apply(s.plan))
+          case s: SubqueryExpression => s.withNewPlan(eliminatePVM(s.plan))
         }
     }
     if (matched) {
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/RuleApplyPermanentViewMarker.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/RuleApplyPermanentViewMarker.scala
index b809d8f34..fdea01490 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/RuleApplyPermanentViewMarker.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/permanentview/RuleApplyPermanentViewMarker.scala
@@ -17,6 +17,7 @@
 
 package org.apache.kyuubi.plugin.spark.authz.rule.permanentview
 
+import org.apache.spark.sql.catalyst.catalog.CatalogTable
 import org.apache.spark.sql.catalyst.expressions.SubqueryExpression
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, View}
 import org.apache.spark.sql.catalyst.rules.Rule
@@ -32,15 +33,24 @@ import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
  */
 class RuleApplyPermanentViewMarker extends Rule[LogicalPlan] {
 
+  private def resolveSubqueryExpression(
+      plan: LogicalPlan,
+      catalogTable: CatalogTable): LogicalPlan = {
+    plan.transformAllExpressions {
+      case subquery: SubqueryExpression =>
+        subquery.withNewPlan(plan = PermanentViewMarker(
+          resolveSubqueryExpression(subquery.plan, catalogTable),
+          catalogTable))
+    }
+  }
+
   override def apply(plan: LogicalPlan): LogicalPlan = {
     plan mapChildren {
       case p: PermanentViewMarker => p
       case permanentView: View if hasResolvedPermanentView(permanentView) =>
-        val resolved = permanentView.transformAllExpressions {
-          case subquery: SubqueryExpression =>
-            subquery.withNewPlan(plan = PermanentViewMarker(subquery.plan, permanentView.desc))
-        }
-        PermanentViewMarker(resolved, permanentView.desc)
+        PermanentViewMarker(
+          resolveSubqueryExpression(permanentView, permanentView.desc),
+          permanentView.desc)
       case other => apply(other)
     }
   }
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index c62cda5fa..b2c23f4ef 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -1368,4 +1368,50 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
       }
     }
   }
+
+  test("[KYUUBI #5793][BUG] PVM with nested scala-subquery should not src 
table privilege") {
+    val db1 = defaultDb
+    val table1 = "table1"
+    val table2 = "table2"
+    val table3 = "table3"
+    val view1 = "perm_view"
+    withSingleCallEnabled {
+      withCleanTmpResources(
+        Seq(
+          (s"$db1.$table1", "table"),
+          (s"$db1.$table2", "table"),
+          (s"$db1.$table3", "table"),
+          (s"$db1.$view1", "view"))) {
+        doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db1.$table1(id int, 
scope int)"))
+        doAs(
+          admin,
+          sql(
+            s"""
+               | CREATE TABLE IF NOT EXISTS $db1.$table2(
+               |  id int,
+               |  name string,
+               |  age int,
+               |  scope int)
+               | """.stripMargin))
+        doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db1.$table3(id int, 
scope int)"))
+        doAs(
+          admin,
+          sql(
+            s"""
+               |CREATE VIEW $db1.$view1
+               |AS
+               |SELECT id, name, max(scope) as max_scope, sum(age) sum_age
+               |FROM $db1.$table2
+               |WHERE scope in (
+               |    SELECT max(scope) max_scope
+               |    FROM $db1.$table1
+               |    WHERE id IN (SELECT id FROM $db1.$table3)
+               |)
+               |GROUP BY id, name
+               |""".stripMargin))
+
+        checkAnswer(permViewOnlyUser, s"SELECT * FROM $db1.$view1", Array.empty[Row])
+      }
+    }
+  }
 }
