This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new aa71e089c [KYUUBI #5657][AUTHZ] Support path privilege of AlterTableSetLocationCommand/AlterTableAddPartitionCommand
aa71e089c is described below

commit aa71e089c312d82580a44ac2a0dfe691dd394f5b
Author: Angerszhuuuu <[email protected]>
AuthorDate: Thu Nov 9 15:20:43 2023 +0800

    [KYUUBI #5657][AUTHZ] Support path privilege of AlterTableSetLocationCommand/AlterTableAddPartitionCommand
    
    ### _Why are the changes needed?_
    To close #5657
     Support path privilege of AlterTableSetLocationCommand/AlterTableAddPartitionCommand
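
    For example, with this change the plugin also requires a [write] privilege on the target paths of statements like the ones below, in addition to the [alter] privilege on the table. This is a minimal illustrative sketch assuming a SparkSession named spark; the database, table, and paths are made-up names, not part of the patch:

        // Illustrative only: table names and paths are hypothetical examples.
        spark.sql("ALTER TABLE default.table1 SET LOCATION '/tmp/new_location'")
        spark.sql(
          """ALTER TABLE default.table1 ADD
            |PARTITION (day='2023-01-01') LOCATION '/tmp/part1'
            |PARTITION (day='2023-01-02') LOCATION '/tmp/part2'
            |""".stripMargin)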
    
    ### _How was this patch tested?_
    - [x] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
    
    ### _Was this patch authored or co-authored using generative AI tooling?_
    No
    
    Closes #5658 from AngersZhuuuu/KYUUBO-5657.
    
    Closes #5657
    
    48056192f [Angerszhuuuu] Merge branch 'master' into KYUUBO-5657
    ca62b5d24 [Angerszhuuuu] Update PrivilegesBuilderSuite.scala
    aef7e1198 [Angerszhuuuu] Update PrivilegesBuilderSuite.scala
    338ae169b [Angerszhuuuu] [KYUUBI #5657][AUTHZ] Support path privilege of AlterTableSetLocationCommand/AlterTableAddPartitionCommand
    
    Authored-by: Angerszhuuuu <[email protected]>
    Signed-off-by: Cheng Pan <[email protected]>
---
 ...he.kyuubi.plugin.spark.authz.serde.URIExtractor |  1 +
 .../src/main/resources/table_command_spec.json     | 12 +++++-
 .../plugin/spark/authz/serde/uriExtractors.scala   |  6 +++
 .../spark/authz/PrivilegesBuilderSuite.scala       | 31 ++++++++++------
 .../plugin/spark/authz/gen/TableCommands.scala     |  8 +++-
 .../authz/ranger/RangerSparkExtensionSuite.scala   | 43 ++++++++++++++++++++++
 6 files changed, 86 insertions(+), 15 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
index f40122615..bad6aba57 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
@@ -17,6 +17,7 @@
 
 org.apache.kyuubi.plugin.spark.authz.serde.CatalogStorageFormatURIExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.BaseRelationFileIndexURIExtractor
+org.apache.kyuubi.plugin.spark.authz.serde.PartitionLocsSeqURIExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.PropertiesPathUriExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.PropertiesLocationUriExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.StringURIExtractor
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index a71052324..3c52998cd 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -711,7 +711,11 @@
   } ],
   "opType" : "ALTERTABLE_ADDPARTS",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "partitionSpecsAndLocs",
+    "fieldExtractor" : "PartitionLocsSeqURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.AlterTableChangeColumnCommand",
   "tableDescs" : [ {
@@ -835,7 +839,11 @@
   } ],
   "opType" : "ALTERTABLE_LOCATION",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "location",
+    "fieldExtractor" : "StringURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.AlterTableSetPropertiesCommand",
   "tableDescs" : [ {
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
index b43d27057..6e0c232d2 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
@@ -73,3 +73,9 @@ class BaseRelationFileIndexURIExtractor extends URIExtractor {
     }
   }
 }
+
+class PartitionLocsSeqURIExtractor extends URIExtractor {
+  override def apply(v1: AnyRef): Seq[Uri] = {
+    v1.asInstanceOf[Seq[(_, Option[String])]].flatMap(_._2).map(Uri)
+  }
+}
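
The new extractor reads AlterTableAddPartitionCommand.partitionSpecsAndLocs, a Seq[(TablePartitionSpec, Option[String])] where TablePartitionSpec is a Map[String, String]; only partitions that declare an explicit LOCATION contribute a Uri. A minimal sketch of that behavior with made-up values (assumes the plugin's Uri case class is in scope):

    // Illustrative values only: the first partition carries an explicit LOCATION,
    // the second does not, so flatMap drops it before wrapping the path into Uri.
    val specsAndLocs: Seq[(Map[String, String], Option[String])] = Seq(
      (Map("day" -> "2023-01-01"), Some("/tmp/part1")),
      (Map("day" -> "2023-01-02"), None))
    val uris = specsAndLocs.flatMap(_._2).map(Uri) // Seq(Uri("/tmp/part1"))
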
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index a331faf61..214a03754 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -306,17 +306,26 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
     val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
 
     assert(in.isEmpty)
-    assert(out.size === 1)
-    val po = out.head
-    assert(po.actionType === PrivilegeObjectActionType.OTHER)
-    assert(po.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
-    assert(po.catalog.isEmpty)
-    assertEqualsIgnoreCase(reusedDb)(po.dbname)
-    assertEqualsIgnoreCase(reusedPartTableShort)(po.objectName)
-    assert(po.columns.head === "pid")
-    checkTableOwner(po)
-    val accessType = ranger.AccessType(po, operationType, isInput = false)
-    assert(accessType === AccessType.ALTER)
+    assert(out.size === 2)
+    val po0 = out.head
+    assert(po0.actionType === PrivilegeObjectActionType.OTHER)
+    assert(po0.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
+    assert(po0.catalog.isEmpty)
+    assertEqualsIgnoreCase(reusedDb)(po0.dbname)
+    assertEqualsIgnoreCase(reusedPartTableShort)(po0.objectName)
+    assert(po0.columns.head === "pid")
+    checkTableOwner(po0)
+    val accessType0 = ranger.AccessType(po0, operationType, isInput = false)
+    assert(accessType0 === AccessType.ALTER)
+
+    val po1 = out.last
+    assert(po1.actionType === PrivilegeObjectActionType.OTHER)
+    assert(po1.catalog.isEmpty)
+    assert(po1.dbname === newLoc)
+    assert(po1.columns === Seq.empty)
+    checkTableOwner(po1)
+    val accessType1 = ranger.AccessType(po1, operationType, isInput = false)
+    assert(accessType1 === AccessType.WRITE)
   }
 
   test("AlterTable(Un)SetPropertiesCommand") {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index 9c264f705..6a3e1d75a 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -78,10 +78,12 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
     val cmd = "org.apache.spark.sql.execution.command.AlterTableAddPartitionCommand"
     val columnDesc =
       ColumnDesc("partitionSpecsAndLocs", classOf[PartitionLocsSeqColumnExtractor])
+    val uriDesc = UriDesc("partitionSpecsAndLocs", classOf[PartitionLocsSeqURIExtractor])
     TableCommandSpec(
       cmd,
       Seq(tableNameDesc.copy(columnDesc = Some(columnDesc))),
-      ALTERTABLE_ADDPARTS)
+      ALTERTABLE_ADDPARTS,
+      uriDescs = Seq(uriDesc))
   }
 
   val AlterTableChangeColumn = {
@@ -150,10 +152,12 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
   val AlterTableSetLocation = {
     val cmd = "org.apache.spark.sql.execution.command.AlterTableSetLocationCommand"
     val columnDesc = ColumnDesc("partitionSpec", classOf[PartitionOptionColumnExtractor])
+    val uriDesc = UriDesc("location", classOf[StringURIExtractor])
     TableCommandSpec(
       cmd,
       Seq(tableNameDesc.copy(columnDesc = Some(columnDesc))),
-      ALTERTABLE_LOCATION)
+      ALTERTABLE_LOCATION,
+      uriDescs = Seq(uriDesc))
   }
 
   val AlterTableSetProperties = TableCommandSpec(
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index e8e4b0220..eaa6a3fa2 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -1197,4 +1197,47 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
       }
     }
   }
+
+  test("AlterTableSetLocationCommand/AlterTableAddPartitionCommand") {
+    val db1 = defaultDb
+    val table1 = "table1"
+    val table2 = "table2"
+    withSingleCallEnabled {
+      withTempDir { path1 =>
+        withCleanTmpResources(Seq((s"$db1.$table1", "table"), (s"$db1.$table2", "table"))) {
+          doAs(
+            admin,
+            sql(
+              s"""
+                 |CREATE TABLE IF NOT EXISTS $db1.$table1(
+                 |id int,
+                 |scope int,
+                 |day string)
+                 |PARTITIONED BY (day)
+                 |""".stripMargin))
+          interceptContains[AccessControlException](
+            doAs(someone, sql(s"ALTER TABLE $db1.$table1 SET LOCATION '$path1'")))(
+            s"does not have [alter] privilege on [$db1/$table1], " +
+              s"[write] privilege on [[$path1, $path1/]]")
+
+          withTempDir { path2 =>
+            withTempDir { path3 =>
+              interceptContains[AccessControlException](
+                doAs(
+                  someone,
+                  sql(
+                    s"""
+                       |ALTER TABLE $db1.$table1
+                       |ADD
+                       |PARTITION (day='2023-01-01') LOCATION '$path2'
+                       |PARTITION (day='2023-01-02') LOCATION '$path3'
+                       |""".stripMargin)))(
+                s"does not have [alter] privilege on [$db1/$table1/day], " +
+                  s"[write] privilege on [[$path2, $path2/],[$path3, $path3/]]")
+            }
+          }
+        }
+      }
+    }
+  }
 }
