This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new b99a21658 [KYUUBI #5455][AUTHZ] Support hudi 
CompactionHoodiePathCommand & CompactionShowHoodiePathCommand
b99a21658 is described below

commit b99a21658a58139eea9a3f3e7a5e38a2c2695718
Author: Angerszhuuuu <[email protected]>
AuthorDate: Fri Oct 27 13:42:25 2023 +0800

    [KYUUBI #5455][AUTHZ] Support hudi CompactionHoodiePathCommand & 
CompactionShowHoodiePathCommand
    
    ### _Why are the changes needed?_
    To close #5455. Kyuubi authz supports hudi CompactionHoodiePathCommand & 
CompactionShowHoodiePathCommand
    
    - CompactionHoodiePathCommand: 
https://github.com/apache/hudi/blob/release-0.12.0/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/CompactionHoodiePathCommand.scala
    - CompactionShowHoodiePathCommand : 
https://github.com/apache/hudi/blob/release-0.12.0/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/CompactionShowHoodiePathCommand.scala
    
    ### _How was this patch tested?_
    - [x] Add some test cases that check the changes thoroughly including 
negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [ ] [Run 
test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests)
 locally before making a pull request
    
    ### _Was this patch authored or co-authored using generative AI tooling?_
    No
    
    Closes #5527 from AngersZhuuuu/KYUUBi-5455.
    
    Closes #5455
    
    7179e2e54 [Angerszhuuuu] Update PrivilegeObject.scala
    a72c9e76f [Angerszhuuuu] Update table_command_spec.json
    3dd6bcc14 [Angerszhuuuu] Merge branch 'master' into KYUUBi-5455
    b2f41b560 [Angerszhuuuu] done
    26477bc33 [Angerszhuuuu] follow comment
    bc9c99e3b [Angerszhuuuu] Update HudiCatalogRangerSparkExtensionSuite.scala
    894c692be [Angerszhuuuu] follow comment
    4831ae5d7 [Angerszhuuuu] Delete uri_command_spec.json
    674ca2177 [Angerszhuuuu] follow comment
    1755b6c23 [Angerszhuuuu] Update HudiCatalogRangerSparkExtensionSuite.scala
    07c69524e [Angerszhuuuu] Revert "Update style.yml"
    ce80bec39 [Angerszhuuuu] Update style.yml
    5aca29a0a [Angerszhuuuu] Update PrivilegeObject.scala
    e70458f30 [Angerszhuuuu] update
    0e185b031 [Angerszhuuuu] Update PrivilegesBuilder.scala
    3b4c5d899 [Angerszhuuuu] Update HudiCatalogRangerSparkExtensionSuite.scala
    60d191a45 [Angerszhuuuu] [KYUUBI #5455][AUTHZ] Support hudi 
CompactionHoodiePathCommand & CompactionShowHoodiePathCommand
    
    Authored-by: Angerszhuuuu <[email protected]>
    Signed-off-by: Kent Yao <[email protected]>
---
 ...he.kyuubi.plugin.spark.authz.serde.URIExtractor |  18 ++
 .../src/main/resources/table_command_spec.json     | 344 ++++++++++++++-------
 .../kyuubi/plugin/spark/authz/ObjectType.scala     |   3 +-
 .../plugin/spark/authz/PrivilegeObject.scala       |  18 +-
 .../plugin/spark/authz/PrivilegeObjectType.scala   |   2 +-
 .../plugin/spark/authz/PrivilegesBuilder.scala     |  17 +
 .../plugin/spark/authz/ranger/AccessResource.scala |  14 +
 .../plugin/spark/authz/ranger/AccessType.scala     |   3 +
 .../plugin/spark/authz/serde/CommandSpec.scala     |   3 +-
 .../plugin/spark/authz/serde/Descriptor.scala      |  18 ++
 .../{PrivilegeObjectType.scala => serde/Uri.scala} |  14 +-
 .../kyuubi/plugin/spark/authz/serde/package.scala  |   2 +
 .../pathExtractors.scala}                          |  19 +-
 .../plugin/spark/authz/gen/HudiCommands.scala      |  20 +-
 .../HudiCatalogRangerSparkExtensionSuite.scala     |  41 ++-
 15 files changed, 406 insertions(+), 130 deletions(-)

diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
 
b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
new file mode 100644
index 000000000..0b77fa26e
--- /dev/null
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
@@ -0,0 +1,18 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+org.apache.kyuubi.plugin.spark.authz.serde.StringURIExtractor
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
 
b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index af86ffd1c..9677e2298 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -11,7 +11,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_ADDCOLS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.AddPartitions",
   "tableDescs" : [ {
@@ -25,7 +26,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_ADDPARTS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.AlterColumn",
   "tableDescs" : [ {
@@ -39,7 +41,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_ADDCOLS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.AlterTable",
   "tableDescs" : [ {
@@ -53,7 +56,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.AppendData",
   "tableDescs" : [ {
@@ -74,7 +78,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.CacheTable",
   "tableDescs" : [ ],
@@ -82,7 +87,8 @@
   "queryDescs" : [ {
     "fieldName" : "table",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.CacheTableAsSelect",
   "tableDescs" : [ ],
@@ -90,7 +96,8 @@
   "queryDescs" : [ {
     "fieldName" : "plan",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.CommentOnTable",
   "tableDescs" : [ {
@@ -104,7 +111,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.CreateTable",
   "tableDescs" : [ {
@@ -139,7 +147,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "CREATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.CreateTableAsSelect",
   "tableDescs" : [ {
@@ -177,7 +186,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.CreateV2Table",
   "tableDescs" : [ {
@@ -194,7 +204,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "CREATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.DeleteFromTable",
   "tableDescs" : [ {
@@ -212,7 +223,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "QUERY",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.DescribeRelation",
   "tableDescs" : [ {
@@ -226,7 +238,8 @@
     "setCurrentDatabaseIfMissing" : true
   } ],
   "opType" : "DESCTABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.DropColumns",
   "tableDescs" : [ {
@@ -240,7 +253,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_ADDCOLS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.DropPartitions",
   "tableDescs" : [ {
@@ -254,7 +268,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_DROPPARTS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.DropTable",
   "tableDescs" : [ {
@@ -277,7 +292,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "DROPTABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.MergeIntoTable",
   "tableDescs" : [ {
@@ -298,7 +314,8 @@
   "queryDescs" : [ {
     "fieldName" : "sourceTable",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.OverwriteByExpression",
   "tableDescs" : [ {
@@ -319,7 +336,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.OverwritePartitionsDynamic",
   "tableDescs" : [ {
@@ -340,7 +358,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.RefreshTable",
   "tableDescs" : [ {
@@ -354,7 +373,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "QUERY",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.RenameColumn",
   "tableDescs" : [ {
@@ -368,7 +388,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_RENAMECOL",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.RenamePartitions",
   "tableDescs" : [ {
@@ -382,7 +403,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_RENAMEPART",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.RepairTable",
   "tableDescs" : [ {
@@ -396,7 +418,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "MSCK",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.ReplaceColumns",
   "tableDescs" : [ {
@@ -410,7 +433,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_REPLACECOLS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.ReplaceData",
   "tableDescs" : [ {
@@ -431,7 +455,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.ReplaceTable",
   "tableDescs" : [ {
@@ -466,7 +491,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "CREATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.ReplaceTableAsSelect",
   "tableDescs" : [ {
@@ -504,7 +530,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.ShowCreateTable",
   "tableDescs" : [ {
@@ -518,7 +545,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "SHOW_CREATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.ShowTableProperties",
   "tableDescs" : [ {
@@ -532,7 +560,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "SHOW_TBLPROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.TruncatePartition",
   "tableDescs" : [ {
@@ -546,7 +575,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_DROPPARTS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.TruncateTable",
   "tableDescs" : [ {
@@ -560,7 +590,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "TRUNCATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.UpdateTable",
   "tableDescs" : [ {
@@ -578,7 +609,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "QUERY",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AlterTableAddColumnsCommand",
   "tableDescs" : [ {
@@ -595,7 +627,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_ADDCOLS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AlterTableAddPartitionCommand",
   "tableDescs" : [ {
@@ -612,7 +645,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_ADDPARTS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AlterTableChangeColumnCommand",
   "tableDescs" : [ {
@@ -629,7 +663,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_REPLACECOLS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AlterTableDropPartitionCommand",
   "tableDescs" : [ {
@@ -646,7 +681,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_DROPPARTS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AlterTableRecoverPartitionsCommand",
   "tableDescs" : [ {
@@ -660,7 +696,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "MSCK",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AlterTableRenameCommand",
   "tableDescs" : [ {
@@ -678,7 +715,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_RENAME",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AlterTableRenamePartitionCommand",
   "tableDescs" : [ {
@@ -695,7 +733,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_RENAMEPART",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AlterTableSerDePropertiesCommand",
   "tableDescs" : [ {
@@ -712,7 +751,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_SERDEPROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AlterTableSetLocationCommand",
   "tableDescs" : [ {
@@ -729,7 +769,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_LOCATION",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AlterTableSetPropertiesCommand",
   "tableDescs" : [ {
@@ -743,7 +784,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AlterTableUnsetPropertiesCommand",
   "tableDescs" : [ {
@@ -757,7 +799,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.AlterViewAsCommand",
   "tableDescs" : [ {
@@ -778,7 +821,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.AnalyzeColumnCommand",
   "tableDescs" : [ {
@@ -816,7 +860,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AnalyzePartitionCommand",
   "tableDescs" : [ {
@@ -842,7 +887,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.AnalyzeTableCommand",
   "tableDescs" : [ {
@@ -865,7 +911,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.CacheTableCommand",
   "tableDescs" : [ ],
@@ -873,7 +920,8 @@
   "queryDescs" : [ {
     "fieldName" : "plan",
     "fieldExtractor" : "LogicalPlanOptionQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.CreateDataSourceTableAsSelectCommand",
   "tableDescs" : [ {
@@ -890,7 +938,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.CreateDataSourceTableCommand",
   "tableDescs" : [ {
@@ -904,7 +953,8 @@
     "setCurrentDatabaseIfMissing" : true
   } ],
   "opType" : "CREATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.CreateTableCommand",
   "tableDescs" : [ {
@@ -918,7 +968,8 @@
     "setCurrentDatabaseIfMissing" : true
   } ],
   "opType" : "CREATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.CreateTableLikeCommand",
   "tableDescs" : [ {
@@ -941,7 +992,8 @@
     "setCurrentDatabaseIfMissing" : true
   } ],
   "opType" : "CREATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.CreateViewCommand",
   "tableDescs" : [ {
@@ -965,7 +1017,8 @@
   }, {
     "fieldName" : "child",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.DescribeColumnCommand",
   "tableDescs" : [ {
@@ -982,7 +1035,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "DESCTABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.DescribeTableCommand",
   "tableDescs" : [ {
@@ -999,7 +1053,8 @@
     "setCurrentDatabaseIfMissing" : true
   } ],
   "opType" : "DESCTABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.DropTableCommand",
   "tableDescs" : [ {
@@ -1017,7 +1072,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "DROPTABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.InsertIntoDataSourceDirCommand",
   "tableDescs" : [ ],
@@ -1025,7 +1081,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.LoadDataCommand",
   "tableDescs" : [ {
@@ -1046,7 +1103,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "LOAD",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.RefreshTableCommand",
   "tableDescs" : [ {
@@ -1060,7 +1118,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "QUERY",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.RepairTableCommand",
   "tableDescs" : [ {
@@ -1074,7 +1133,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "MSCK",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.ShowColumnsCommand",
   "tableDescs" : [ {
@@ -1088,7 +1148,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "SHOWCOLUMNS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.ShowCreateTableAsSerdeCommand",
   "tableDescs" : [ {
@@ -1102,7 +1163,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "SHOW_CREATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.ShowCreateTableCommand",
   "tableDescs" : [ {
@@ -1116,7 +1178,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "SHOW_CREATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.ShowPartitionsCommand",
   "tableDescs" : [ {
@@ -1133,7 +1196,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "SHOWPARTITIONS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.ShowTablePropertiesCommand",
   "tableDescs" : [ {
@@ -1147,7 +1211,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "SHOW_TBLPROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.TruncateTableCommand",
   "tableDescs" : [ {
@@ -1164,7 +1229,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "TRUNCATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.datasources.CreateTable",
   "tableDescs" : [ {
@@ -1181,12 +1247,14 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanOptionQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.datasources.CreateTempViewUsing",
   "tableDescs" : [ ],
   "opType" : "CREATEVIEW",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.datasources.InsertIntoDataSourceCommand",
   "tableDescs" : [ {
@@ -1207,7 +1275,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand",
   "tableDescs" : [ {
@@ -1231,7 +1300,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.datasources.RefreshTable",
   "tableDescs" : [ {
@@ -1245,7 +1315,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "QUERY",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand",
   "tableDescs" : [ ],
@@ -1253,7 +1324,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand",
   "tableDescs" : [ {
@@ -1273,7 +1345,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand",
   "tableDescs" : [ ],
@@ -1281,7 +1354,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hive.execution.InsertIntoHiveTable",
   "tableDescs" : [ {
@@ -1305,7 +1379,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.hive.execution.OptimizedCreateHiveTableAsSelectCommand",
   "tableDescs" : [ {
@@ -1325,7 +1400,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.Call",
   "tableDescs" : [ {
@@ -1339,7 +1415,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.DeleteFromIcebergTable",
   "tableDescs" : [ {
@@ -1357,7 +1434,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "QUERY",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.MergeIntoIcebergTable",
   "tableDescs" : [ {
@@ -1378,7 +1456,8 @@
   "queryDescs" : [ {
     "fieldName" : "sourceTable",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.UnresolvedMergeIntoIcebergTable",
   "tableDescs" : [ {
@@ -1399,7 +1478,8 @@
   "queryDescs" : [ {
     "fieldName" : "sourceTable",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.UpdateIcebergTable",
   "tableDescs" : [ {
@@ -1417,7 +1497,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "QUERY",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.hudi.command.AlterHoodieTableAddColumnsCommand",
   "tableDescs" : [ {
@@ -1434,7 +1515,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_ADDCOLS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.hudi.command.AlterHoodieTableChangeColumnCommand",
   "tableDescs" : [ {
@@ -1451,7 +1533,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_REPLACECOLS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.hudi.command.AlterHoodieTableDropPartitionCommand",
   "tableDescs" : [ {
@@ -1468,7 +1551,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_DROPPARTS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.hudi.command.AlterHoodieTableRenameCommand",
   "tableDescs" : [ {
@@ -1486,7 +1570,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_RENAME",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.AlterTableCommand",
   "tableDescs" : [ {
@@ -1500,7 +1585,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.CallProcedureHoodieCommand",
   "tableDescs" : [ {
@@ -1531,7 +1617,18 @@
     "setCurrentDatabaseIfMissing" : true
   } ],
   "opType" : "QUERY",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
+}, {
+  "classname" : 
"org.apache.spark.sql.hudi.command.CompactionHoodiePathCommand",
+  "tableDescs" : [ ],
+  "opType" : "CREATETABLE",
+  "queryDescs" : [ ],
+  "uriDescs" : [ {
+    "fieldName" : "path",
+    "fieldExtractor" : "StringURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : 
"org.apache.spark.sql.hudi.command.CompactionHoodieTableCommand",
   "tableDescs" : [ {
@@ -1543,18 +1640,20 @@
     "catalogDesc" : null,
     "isInput" : false,
     "setCurrentDatabaseIfMissing" : false
-  }, {
-    "fieldName" : "table",
-    "fieldExtractor" : "CatalogTableTableExtractor",
-    "columnDesc" : null,
-    "actionTypeDesc" : null,
-    "tableTypeDesc" : null,
-    "catalogDesc" : null,
-    "isInput" : true,
-    "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "CREATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
+}, {
+  "classname" : 
"org.apache.spark.sql.hudi.command.CompactionShowHoodiePathCommand",
+  "tableDescs" : [ ],
+  "opType" : "SHOW_TBLPROPERTIES",
+  "queryDescs" : [ ],
+  "uriDescs" : [ {
+    "fieldName" : "path",
+    "fieldExtractor" : "StringURIExtractor",
+    "isInput" : true
+  } ]
 }, {
   "classname" : 
"org.apache.spark.sql.hudi.command.CompactionShowHoodieTableCommand",
   "tableDescs" : [ {
@@ -1568,7 +1667,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "SHOW_TBLPROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.hudi.command.CreateHoodieTableAsSelectCommand",
   "tableDescs" : [ {
@@ -1585,7 +1685,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.CreateHoodieTableCommand",
   "tableDescs" : [ {
@@ -1599,7 +1700,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "CREATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.hudi.command.CreateHoodieTableLikeCommand",
   "tableDescs" : [ {
@@ -1622,7 +1724,8 @@
     "setCurrentDatabaseIfMissing" : true
   } ],
   "opType" : "CREATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.CreateIndexCommand",
   "tableDescs" : [ {
@@ -1636,7 +1739,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "CREATEINDEX",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.DeleteHoodieTableCommand",
   "tableDescs" : [ {
@@ -1654,7 +1758,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "QUERY",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.DropHoodieTableCommand",
   "tableDescs" : [ {
@@ -1672,7 +1777,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "DROPTABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.DropIndexCommand",
   "tableDescs" : [ {
@@ -1686,7 +1792,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "DROPINDEX",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.hudi.command.InsertIntoHoodieTableCommand",
   "tableDescs" : [ {
@@ -1707,7 +1814,8 @@
   "queryDescs" : [ {
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.hudi.command.MergeIntoHoodieTableCommand",
   "tableDescs" : [ {
@@ -1728,7 +1836,8 @@
   "queryDescs" : [ {
     "fieldName" : "mergeInto",
     "fieldExtractor" : "HudiMergeIntoSourceTableExtractor"
-  } ]
+  } ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.RefreshIndexCommand",
   "tableDescs" : [ {
@@ -1742,7 +1851,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERINDEX_REBUILD",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.RepairHoodieTableCommand",
   "tableDescs" : [ {
@@ -1756,7 +1866,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "MSCK",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.hudi.command.ShowHoodieTablePartitionsCommand",
   "tableDescs" : [ {
@@ -1773,7 +1884,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "SHOWPARTITIONS",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.ShowIndexesCommand",
   "tableDescs" : [ {
@@ -1787,7 +1899,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "SHOWINDEXES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.Spark31AlterTableCommand",
   "tableDescs" : [ {
@@ -1801,7 +1914,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.TruncateHoodieTableCommand",
   "tableDescs" : [ {
@@ -1818,7 +1932,8 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "TRUNCATETABLE",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.hudi.command.UpdateHoodieTableCommand",
   "tableDescs" : [ {
@@ -1836,5 +1951,6 @@
     "setCurrentDatabaseIfMissing" : false
   } ],
   "opType" : "QUERY",
-  "queryDescs" : [ ]
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 } ]
\ No newline at end of file
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ObjectType.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ObjectType.scala
index c94bf4f8d..fe53440c1 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ObjectType.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ObjectType.scala
@@ -23,7 +23,7 @@ object ObjectType extends Enumeration {
 
   type ObjectType = Value
 
-  val DATABASE, TABLE, VIEW, COLUMN, FUNCTION, INDEX = Value
+  val DATABASE, TABLE, VIEW, COLUMN, FUNCTION, INDEX, URI = Value
 
   def apply(obj: PrivilegeObject, opType: OperationType): ObjectType = {
     obj.privilegeObjectType match {
@@ -33,6 +33,7 @@ object ObjectType extends Enumeration {
       case PrivilegeObjectType.TABLE_OR_VIEW if 
opType.toString.contains("VIEW") => VIEW
       case PrivilegeObjectType.TABLE_OR_VIEW => TABLE
       case PrivilegeObjectType.FUNCTION => FUNCTION
+      case PrivilegeObjectType.DFS_URL | PrivilegeObjectType.LOCAL_URI => URI
     }
   }
 }
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObject.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObject.scala
index 195aa7989..0fe145b4a 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObject.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObject.scala
@@ -17,11 +17,12 @@
 
 package org.apache.kyuubi.plugin.spark.authz
 
+import java.net.URI
 import javax.annotation.Nonnull
 
 import 
org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType.PrivilegeObjectActionType
 import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectType._
-import org.apache.kyuubi.plugin.spark.authz.serde.{Database, Function, Table}
+import org.apache.kyuubi.plugin.spark.authz.serde.{Database, Function, Table, 
Uri}
 
 /**
  * Build a Spark logical plan to different `PrivilegeObject`s
@@ -86,4 +87,19 @@ object PrivilegeObject {
       None
     ) // TODO: Support catalog for function
   }
+
+  def apply(uri: Uri): PrivilegeObject = {
+    val privilegeObjectType = Option(new URI(uri.path).getScheme) match {
+      case Some("file") => LOCAL_URI
+      case _ => DFS_URL
+    }
+    new PrivilegeObject(
+      privilegeObjectType,
+      PrivilegeObjectActionType.OTHER,
+      uri.path,
+      null,
+      Nil,
+      None,
+      None)
+  }
 }
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
index f514fcb82..4020392f2 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
@@ -20,5 +20,5 @@ package org.apache.kyuubi.plugin.spark.authz
 object PrivilegeObjectType extends Enumeration {
   type PrivilegeObjectType = Value
 
-  val DATABASE, TABLE_OR_VIEW, FUNCTION = Value
+  val DATABASE, TABLE_OR_VIEW, FUNCTION, LOCAL_URI, DFS_URL = Value
 }
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
index 0d4b53a5c..7d6d791ad 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
@@ -194,6 +194,23 @@ object PrivilegesBuilder {
             outputObjs ++= getTablePriv(td)
           }
         }
+        spec.uriDescs.foreach { ud =>
+          try {
+            val uri = ud.extract(plan)
+            uri match {
+              case Some(uri) =>
+                if (ud.isInput) {
+                  inputObjs += PrivilegeObject(uri)
+                } else {
+                  outputObjs += PrivilegeObject(uri)
+                }
+              case None =>
+            }
+          } catch {
+            case e: Exception =>
+              LOG.debug(ud.error(plan, e))
+          }
+        }
         spec.queries(plan).foreach(buildQuery(_, inputObjs, spark = spark))
         spec.operationType
 
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessResource.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessResource.scala
index 23cd87b27..858dc1c37 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessResource.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessResource.scala
@@ -17,6 +17,9 @@
 
 package org.apache.kyuubi.plugin.spark.authz.ranger
 
+import java.io.File
+import java.util
+
 import scala.language.implicitConversions
 
 import org.apache.ranger.plugin.policyengine.RangerAccessResourceImpl
@@ -35,6 +38,7 @@ class AccessResource private (val objectType: ObjectType, val 
catalog: Option[St
     val columnStr = getColumn
     if (columnStr == null) Nil else columnStr.split(",").filter(_.nonEmpty)
   }
+  def getUrl: String = getValue("url")
 }
 
 object AccessResource {
@@ -60,6 +64,16 @@ object AccessResource {
       case TABLE | VIEW | INDEX =>
         resource.setValue("database", firstLevelResource)
         resource.setValue("table", secondLevelResource)
+      case URI =>
+        val objectList = new util.ArrayList[String]
+        Option(firstLevelResource)
+          .filter(_.nonEmpty)
+          .foreach { path =>
+            val s = path.stripSuffix(File.separator)
+            objectList.add(s)
+            objectList.add(s + File.separator)
+          }
+        resource.setValue("url", objectList)
     }
     resource.setServiceDef(SparkRangerAdminPlugin.getServiceDef)
     owner.foreach(resource.setOwnerUser)
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessType.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessType.scala
index d533d638b..7f1ddb68e 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessType.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessType.scala
@@ -35,6 +35,9 @@ object AccessType extends Enumeration {
           case CREATETABLE | CREATEVIEW | CREATETABLE_AS_SELECT
               if obj.privilegeObjectType == TABLE_OR_VIEW =>
             if (isInput) SELECT else CREATE
+          case CREATETABLE
+              if obj.privilegeObjectType == DFS_URL || obj.privilegeObjectType 
== LOCAL_URI =>
+            if (isInput) SELECT else CREATE
           case ALTERDATABASE |
               ALTERDATABASE_LOCATION |
               ALTERTABLE_ADDCOLS |
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/CommandSpec.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/CommandSpec.scala
index 22bf07bfa..7b306551c 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/CommandSpec.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/CommandSpec.scala
@@ -83,7 +83,8 @@ case class TableCommandSpec(
     classname: String,
     tableDescs: Seq[TableDesc],
     opType: String = OperationType.QUERY.toString,
-    queryDescs: Seq[QueryDesc] = Nil) extends CommandSpec {
+    queryDescs: Seq[QueryDesc] = Nil,
+    uriDescs: Seq[UriDesc] = Nil) extends CommandSpec {
   def queries: LogicalPlan => Seq[LogicalPlan] = plan => {
     queryDescs.flatMap { qd =>
       try {
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
index fc660ce14..4869fc1da 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
@@ -306,3 +306,21 @@ case class ScanDesc(
     }
   }
 }
+
+/**
+ * URI Descriptor
+ *
+ * @param fieldName the field name or method name of this uri field
+ * @param fieldExtractor the key of a [[URIExtractor]] instance
+ * @param isInput read or write
+ */
+case class UriDesc(
+    fieldName: String,
+    fieldExtractor: String,
+    isInput: Boolean = false) extends Descriptor {
+  override def extract(v: AnyRef): Option[Uri] = {
+    val uriVal = invokeAs[AnyRef](v, fieldName)
+    val uriExtractor = lookupExtractor[URIExtractor](fieldExtractor)
+    uriExtractor(uriVal)
+  }
+}
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Uri.scala
similarity index 81%
copy from 
extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
copy to 
extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Uri.scala
index f514fcb82..aa9af8732 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Uri.scala
@@ -15,10 +15,12 @@
  * limitations under the License.
  */
 
-package org.apache.kyuubi.plugin.spark.authz
+package org.apache.kyuubi.plugin.spark.authz.serde
 
-object PrivilegeObjectType extends Enumeration {
-  type PrivilegeObjectType = Value
-
-  val DATABASE, TABLE_OR_VIEW, FUNCTION = Value
-}
+/**
+ * :: Developer API ::
+ *
+ * Represents a URI identity
+ * @param path the string path or location of the URI
+ */
+case class Uri(path: String)
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/package.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/package.scala
index 6863516b6..1c5ffb629 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/package.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/package.scala
@@ -34,6 +34,7 @@ import 
org.apache.kyuubi.plugin.spark.authz.serde.FunctionTypeExtractor.function
 import 
org.apache.kyuubi.plugin.spark.authz.serde.QueryExtractor.queryExtractors
 import 
org.apache.kyuubi.plugin.spark.authz.serde.TableExtractor.tableExtractors
 import 
org.apache.kyuubi.plugin.spark.authz.serde.TableTypeExtractor.tableTypeExtractors
+import org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor.uriExtractors
 import org.apache.kyuubi.util.reflect.ReflectUtils._
 
 package object serde {
@@ -129,6 +130,7 @@ package object serde {
       case c if classOf[FunctionExtractor].isAssignableFrom(c) => 
functionExtractors
       case c if classOf[FunctionTypeExtractor].isAssignableFrom(c) => 
functionTypeExtractors
       case c if classOf[ActionTypeExtractor].isAssignableFrom(c) => 
actionTypeExtractors
+      case c if classOf[URIExtractor].isAssignableFrom(c) => uriExtractors
       case _ => throw new IllegalArgumentException(s"Unknown extractor type: 
$ct")
     }
     extractors(extractorKey).asInstanceOf[T]
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/pathExtractors.scala
similarity index 67%
copy from 
extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
copy to 
extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/pathExtractors.scala
index f514fcb82..81fa8411b 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/pathExtractors.scala
@@ -15,10 +15,21 @@
  * limitations under the License.
  */
 
-package org.apache.kyuubi.plugin.spark.authz
+package org.apache.kyuubi.plugin.spark.authz.serde
 
-object PrivilegeObjectType extends Enumeration {
-  type PrivilegeObjectType = Value
+trait URIExtractor extends (AnyRef => Option[Uri]) with Extractor
 
-  val DATABASE, TABLE_OR_VIEW, FUNCTION = Value
+object URIExtractor {
+  val uriExtractors: Map[String, URIExtractor] = {
+    loadExtractorsToMap[URIExtractor]
+  }
+}
+
+/**
+ * A [[URIExtractor]] that wraps a plain string field value as a [[Uri]]
+ */
+class StringURIExtractor extends URIExtractor {
+  override def apply(v1: AnyRef): Option[Uri] = {
+    Some(Uri(v1.asInstanceOf[String]))
+  }
 }
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
index 9d80ee0f4..381f8081a 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
@@ -136,7 +136,7 @@ object HudiCommands extends CommandSpecs[TableCommandSpec] {
   val CompactionHoodieTableCommand = {
     val cmd = "org.apache.spark.sql.hudi.command.CompactionHoodieTableCommand"
     val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor])
-    TableCommandSpec(cmd, Seq(tableDesc, tableDesc.copy(isInput = true)), 
CREATETABLE)
+    TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE)
   }
 
   val CompactionShowHoodieTableCommand = {
@@ -145,6 +145,22 @@ object HudiCommands extends CommandSpecs[TableCommandSpec] 
{
     TableCommandSpec(cmd, Seq(tableDesc), SHOW_TBLPROPERTIES)
   }
 
+  val CompactionHoodiePathCommand = {
+    val cmd = "org.apache.spark.sql.hudi.command.CompactionHoodiePathCommand"
+    val uriDesc = UriDesc("path", classOf[StringURIExtractor])
+    TableCommandSpec(
+      cmd,
+      Seq.empty,
+      CREATETABLE,
+      uriDescs = Seq(uriDesc))
+  }
+
+  val CompactionShowHoodiePathCommand = {
+    val cmd = 
"org.apache.spark.sql.hudi.command.CompactionShowHoodiePathCommand"
+    val uriDesc = UriDesc("path", classOf[StringURIExtractor], isInput = true)
+    TableCommandSpec(cmd, Seq.empty, SHOW_TBLPROPERTIES, uriDescs = 
Seq(uriDesc))
+  }
+
   val CreateIndexCommand = {
     val cmd = "org.apache.spark.sql.hudi.command.CreateIndexCommand"
     val tableDesc = TableDesc("table", classOf[CatalogTableTableExtractor])
@@ -253,7 +269,9 @@ object HudiCommands extends CommandSpecs[TableCommandSpec] {
     CreateHoodieTableCommand,
     CreateHoodieTableLikeCommand,
     CreateIndexCommand,
+    CompactionHoodiePathCommand,
     CompactionHoodieTableCommand,
+    CompactionShowHoodiePathCommand,
     CompactionShowHoodieTableCommand,
     DeleteHoodieTableCommand,
     DropHoodieTableCommand,
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
index 042072910..8cff1698d 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
@@ -315,7 +315,7 @@ class HudiCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
       val compactionTable = s"RUN COMPACTION ON $namespace1.$table1"
       interceptContains[AccessControlException] {
         doAs(someone, sql(compactionTable))
-      }(s"does not have [select] privilege on [$namespace1/$table1]")
+      }(s"does not have [create] privilege on [$namespace1/$table1]")
       doAs(admin, sql(compactionTable))
 
       val showCompactionTable = s"SHOW COMPACTION ON  $namespace1.$table1"
@@ -326,6 +326,45 @@ class HudiCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
     }
   }
 
+  test("CompactionHoodiePathCommand / CompactionShowHoodiePathCommand") {
+    withSingleCallEnabled {
+      withCleanTmpResources(Seq.empty) {
+        val path1 = "hdfs://demo/test/hudi/path"
+        val compactOnPath = s"RUN COMPACTION ON '$path1'"
+        interceptContains[AccessControlException](
+          doAs(someone, sql(compactOnPath)))(
+          s"does not have [create] privilege on [[$path1, $path1/]]")
+
+        val showCompactOnPath = s"SHOW COMPACTION ON '$path1'"
+        interceptContains[AccessControlException](
+          doAs(someone, sql(showCompactOnPath)))(
+          s"does not have [select] privilege on [[$path1, $path1/]]")
+
+        val path2 = "file:///demo/test/hudi/path"
+        val compactOnPath2 = s"RUN COMPACTION ON '$path2'"
+        interceptContains[AccessControlException](
+          doAs(someone, sql(compactOnPath2)))(
+          s"does not have [create] privilege on [[$path2, $path2/]]")
+
+        val showCompactOnPath2 = s"SHOW COMPACTION ON '$path2'"
+        interceptContains[AccessControlException](
+          doAs(someone, sql(showCompactOnPath2)))(
+          s"does not have [select] privilege on [[$path2, $path2/]]")
+
+        val path3 = "hdfs://demo/test/hudi/path"
+        val compactOnPath3 = s"RUN COMPACTION ON '$path3'"
+        interceptContains[AccessControlException](
+          doAs(someone, sql(compactOnPath3)))(
+          s"does not have [create] privilege on [[$path3, $path3/]]")
+
+        val showCompactOnPath3 = s"SHOW COMPACTION ON '$path3/'"
+        interceptContains[AccessControlException](
+          doAs(someone, sql(showCompactOnPath3)))(
+          s"does not have [select] privilege on [[$path3, $path3/]]")
+      }
+    }
+  }
+
   test("InsertIntoHoodieTableCommand") {
     withSingleCallEnabled {
       withCleanTmpResources(Seq(

Reply via email to