This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new e45f4d950 [KYUUBI #5630][Authz] Support path check of LoadDataCommand
e45f4d950 is described below

commit e45f4d950bb95841350e23ba1c576d57840e542b
Author: Angerszhuuuu <[email protected]>
AuthorDate: Mon Nov 6 19:02:20 2023 +0800

    [KYUUBI #5630][Authz] Support path check of LoadDataCommand
    
    ### _Why are the changes needed?_
    To close #5630
    Support path check of LoadDataCommand
    
    ### _How was this patch tested?_
    - [x] Add some test cases that check the changes thoroughly including 
negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [ ] [Run 
test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests)
 locally before making a pull request
    
    ### _Was this patch authored or co-authored using generative AI tooling?_
    No
    
    Closes #5632 from AngersZhuuuu/KYUUBi-5630.
    
    Closes #5630
    
    885a1d7ed [Angerszhuuuu] [KYUUBI #5630][Authz] Support path check of 
LoadDataCommand
    
    Authored-by: Angerszhuuuu <[email protected]>
    Signed-off-by: Cheng Pan <[email protected]>
---
 .../src/main/resources/table_command_spec.json     |  7 +++++-
 .../spark/authz/PrivilegesBuilderSuite.scala       | 25 ++++++++++++++--------
 .../plugin/spark/authz/gen/TableCommands.scala     |  3 ++-
 .../authz/ranger/RangerSparkExtensionSuite.scala   | 22 +++++++++++++++++++
 4 files changed, 46 insertions(+), 11 deletions(-)

diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
 
b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index b36bce824..4fc26a881 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -1128,7 +1128,12 @@
   } ],
   "opType" : "LOAD",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "path",
+    "fieldExtractor" : "StringURIExtractor",
+    "actionTypeDesc" : null,
+    "isInput" : true
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.RefreshTableCommand",
   "tableDescs" : [ {
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index fbb50c242..d8ae3d244 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -1430,17 +1430,24 @@ class HiveCatalogPrivilegeBuilderSuite extends 
PrivilegesBuilderSuite {
         .queryExecution.analyzed
       val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
       assert(operationType === LOAD)
-      assert(in.isEmpty)
-
-      assert(out.size === 1)
-      val po0 = out.head
-      assert(po0.actionType === PrivilegeObjectActionType.INSERT_OVERWRITE)
-      assert(po0.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
-      assertEqualsIgnoreCase(reusedDb)(po0.dbname)
-      assert(po0.objectName equalsIgnoreCase tableName.split("\\.").last)
+      assert(in.size === 1)
+      val po0 = in.head
+      assert(po0.actionType === PrivilegeObjectActionType.OTHER)
+      assert(po0.privilegeObjectType === PrivilegeObjectType.DFS_URL)
+      assert(po0.dbname === dataPath)
+      assert(po0.objectName === null)
       assert(po0.columns.isEmpty)
       checkTableOwner(po0)
-      val accessType0 = ranger.AccessType(po0, operationType, isInput = false)
+
+      assert(out.size === 1)
+      val po1 = out.head
+      assert(po1.actionType === PrivilegeObjectActionType.INSERT_OVERWRITE)
+      assert(po1.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
+      assertEqualsIgnoreCase(reusedDb)(po1.dbname)
+      assert(po1.objectName equalsIgnoreCase tableName.split("\\.").last)
+      assert(po1.columns.isEmpty)
+      checkTableOwner(po1)
+      val accessType0 = ranger.AccessType(po1, operationType, isInput = false)
       assert(accessType0 === AccessType.UPDATE)
     }
   }
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index 6007d689e..3eeb5dfe0 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -592,7 +592,8 @@ object TableCommands extends CommandSpecs[TableCommandSpec] 
{
       fieldName = "table",
       columnDesc = Some(columnDesc),
       actionTypeDesc = Some(actionTypeDesc))
-    TableCommandSpec(cmd, Seq(tableDesc), "LOAD")
+    val uriDesc = UriDesc("path", classOf[StringURIExtractor], isInput = true)
+    TableCommandSpec(cmd, Seq(tableDesc), LOAD, uriDescs = Seq(uriDesc))
   }
 
   val RefreshTable = {
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index e8e4486ac..ca0d7abbb 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -1133,4 +1133,26 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
       }
     }
   }
+
+  test("LoadDataCommand") {
+    val db1 = defaultDb
+    val table1 = "table1"
+    withSingleCallEnabled {
+      withTempDir { path =>
+        withCleanTmpResources(Seq((s"$db1.$table1", "table"))) {
+          doAs(admin, sql(s"CREATE TABLE IF NOT EXISTS $db1.$table1 (id int, 
scope int)"))
+          val loadDataSql =
+            s"""
+               |LOAD DATA LOCAL INPATH '$path'
+               |OVERWRITE INTO TABLE $db1.$table1
+               |""".stripMargin
+          doAs(admin, sql(loadDataSql).explain(true))
+          interceptContains[AccessControlException](
+            doAs(someone, sql(loadDataSql).explain(true)))(
+            s"does not have [select] privilege on " +
+              s"[[$path, $path/]]")
+        }
+      }
+    }
+  }
 }

Reply via email to