This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 517543ae4 [KYUUBI #5642][AUTHZ] Support path privilege of 
CreateDatabaseCommand/AlterDatabaseSetLocationCommand/CreateNamespace/SetNamespaceLocation
517543ae4 is described below

commit 517543ae490321f5cfc15ce5bdc400adb8b1389b
Author: Angerszhuuuu <[email protected]>
AuthorDate: Thu Nov 9 11:07:52 2023 +0800

    [KYUUBI #5642][AUTHZ] Support path privilege of 
CreateDatabaseCommand/AlterDatabaseSetLocationCommand/CreateNamespace/SetNamespaceLocation
    
    ### _Why are the changes needed?_
    To close #5642
    Support path privilege of 
CreateDatabaseCommand/AlterDatabaseSetLocationCommand/CreateNamespace/SetNamespaceLocation
    
    ### _How was this patch tested?_
    - [x] Add some test cases that check the changes thoroughly, including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
    
    ### _Was this patch authored or co-authored using generative AI tooling?_
    No
    
    Closes #5645 from AngersZhuuuu/KYUUBi-5642.
    
    Closes #5642
    
    4063e449f [Angerszhuuuu] Update RangerSparkExtensionSuite.scala
    2d08486cf [Angerszhuuuu] Update V2CommandsPrivilegesSuite.scala
    9097b07ce [Angerszhuuuu] Update PrivilegesBuilderSuite.scala
    d3919fa15 [Angerszhuuuu] Update PrivilegesBuilderSuite.scala
    9f2de3079 [Angerszhuuuu] Merge branch 'master' into KYUUBi-5642
    1e42b8bb6 [Angerszhuuuu] Update V2CommandsPrivilegesSuite.scala
    a07e87048 [Angerszhuuuu] update
    db9696ee4 [Angerszhuuuu] Update DatabaseCommands.scala
    871df4e21 [Angerszhuuuu] follow comment
    5b82823aa [Angerszhuuuu] Merge branch 'master' into KYUUBi-5642
    b73be3cb7 [Angerszhuuuu] [KYUUBI #5642][AUTHZ] Support path privilege of 
CreateDatabaseCommand AlterDatabaseSetLocationCommand
    
    Authored-by: Angerszhuuuu <[email protected]>
    Signed-off-by: Kent Yao <[email protected]>
---
 ...he.kyuubi.plugin.spark.authz.serde.URIExtractor |  3 +-
 .../src/main/resources/database_command_spec.json  | 61 ++++++++++++++++------
 .../src/main/resources/table_command_spec.json     |  2 +-
 .../plugin/spark/authz/PrivilegesBuilder.scala     | 13 +++++
 .../plugin/spark/authz/serde/CommandSpec.scala     |  3 +-
 .../plugin/spark/authz/serde/uriExtractors.scala   | 14 ++++-
 .../spark/authz/PrivilegesBuilderSuite.scala       | 29 ++++++----
 .../spark/authz/V2CommandsPrivilegesSuite.scala    | 29 ++++++----
 .../plugin/spark/authz/gen/DatabaseCommands.scala  | 30 ++++++++---
 .../plugin/spark/authz/gen/TableCommands.scala     |  2 +-
 .../authz/ranger/RangerSparkExtensionSuite.scala   | 26 +++++++++
 11 files changed, 163 insertions(+), 49 deletions(-)

diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
 
b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
index 733994176..f40122615 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
@@ -17,6 +17,7 @@
 
 org.apache.kyuubi.plugin.spark.authz.serde.CatalogStorageFormatURIExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.BaseRelationFileIndexURIExtractor
-org.apache.kyuubi.plugin.spark.authz.serde.OptionsUriExtractor
+org.apache.kyuubi.plugin.spark.authz.serde.PropertiesPathUriExtractor
+org.apache.kyuubi.plugin.spark.authz.serde.PropertiesLocationUriExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.StringURIExtractor
 org.apache.kyuubi.plugin.spark.authz.serde.StringSeqURIExtractor
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/resources/database_command_spec.json
 
b/extensions/spark/kyuubi-spark-authz/src/main/resources/database_command_spec.json
index c640ed89b..c45e63890 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/resources/database_command_spec.json
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/resources/database_command_spec.json
@@ -6,7 +6,8 @@
     "catalogDesc" : null,
     "isInput" : false
   } ],
-  "opType" : "ALTERDATABASE"
+  "opType" : "ALTERDATABASE",
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.CreateNamespace",
   "databaseDescs" : [ {
@@ -28,7 +29,12 @@
     "catalogDesc" : null,
     "isInput" : false
   } ],
-  "opType" : "CREATEDATABASE"
+  "opType" : "CREATEDATABASE",
+  "uriDescs" : [ {
+    "fieldName" : "properties",
+    "fieldExtractor" : "PropertiesLocationUriExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.DescribeNamespace",
   "databaseDescs" : [ {
@@ -37,7 +43,8 @@
     "catalogDesc" : null,
     "isInput" : true
   } ],
-  "opType" : "DESCDATABASE"
+  "opType" : "DESCDATABASE",
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.DropNamespace",
   "databaseDescs" : [ {
@@ -46,7 +53,8 @@
     "catalogDesc" : null,
     "isInput" : false
   } ],
-  "opType" : "DROPDATABASE"
+  "opType" : "DROPDATABASE",
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.SetCatalogAndNamespace",
   "databaseDescs" : [ {
@@ -68,7 +76,8 @@
     },
     "isInput" : true
   } ],
-  "opType" : "SWITCHDATABASE"
+  "opType" : "SWITCHDATABASE",
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.SetNamespaceLocation",
   "databaseDescs" : [ {
@@ -77,7 +86,12 @@
     "catalogDesc" : null,
     "isInput" : false
   } ],
-  "opType" : "ALTERDATABASE_LOCATION"
+  "opType" : "ALTERDATABASE_LOCATION",
+  "uriDescs" : [ {
+    "fieldName" : "location",
+    "fieldExtractor" : "StringURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.SetNamespaceProperties",
   "databaseDescs" : [ {
@@ -86,7 +100,8 @@
     "catalogDesc" : null,
     "isInput" : false
   } ],
-  "opType" : "ALTERDATABASE"
+  "opType" : "ALTERDATABASE",
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AlterDatabasePropertiesCommand",
   "databaseDescs" : [ {
@@ -95,7 +110,8 @@
     "catalogDesc" : null,
     "isInput" : false
   } ],
-  "opType" : "ALTERDATABASE"
+  "opType" : "ALTERDATABASE",
+  "uriDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AlterDatabaseSetLocationCommand",
   "databaseDescs" : [ {
@@ -104,7 +120,12 @@
     "catalogDesc" : null,
     "isInput" : false
   } ],
-  "opType" : "ALTERDATABASE_LOCATION"
+  "opType" : "ALTERDATABASE_LOCATION",
+  "uriDescs" : [ {
+    "fieldName" : "location",
+    "fieldExtractor" : "StringURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.AnalyzeTablesCommand",
   "databaseDescs" : [ {
@@ -113,7 +134,8 @@
     "catalogDesc" : null,
     "isInput" : true
   } ],
-  "opType" : "ANALYZE_TABLE"
+  "opType" : "ANALYZE_TABLE",
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.CreateDatabaseCommand",
   "databaseDescs" : [ {
@@ -122,7 +144,12 @@
     "catalogDesc" : null,
     "isInput" : false
   } ],
-  "opType" : "CREATEDATABASE"
+  "opType" : "CREATEDATABASE",
+  "uriDescs" : [ {
+    "fieldName" : "path",
+    "fieldExtractor" : "StringURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.DescribeDatabaseCommand",
   "databaseDescs" : [ {
@@ -131,7 +158,8 @@
     "catalogDesc" : null,
     "isInput" : true
   } ],
-  "opType" : "DESCDATABASE"
+  "opType" : "DESCDATABASE",
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.DropDatabaseCommand",
   "databaseDescs" : [ {
@@ -140,7 +168,8 @@
     "catalogDesc" : null,
     "isInput" : false
   } ],
-  "opType" : "DROPDATABASE"
+  "opType" : "DROPDATABASE",
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.SetDatabaseCommand",
   "databaseDescs" : [ {
@@ -149,7 +178,8 @@
     "catalogDesc" : null,
     "isInput" : true
   } ],
-  "opType" : "SWITCHDATABASE"
+  "opType" : "SWITCHDATABASE",
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.SetNamespaceCommand",
   "databaseDescs" : [ {
@@ -158,5 +188,6 @@
     "catalogDesc" : null,
     "isInput" : true
   } ],
-  "opType" : "SWITCHDATABASE"
+  "opType" : "SWITCHDATABASE",
+  "uriDescs" : [ ]
 } ]
\ No newline at end of file
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
 
b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index d4c8a27f6..a71052324 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -1400,7 +1400,7 @@
   } ],
   "uriDescs" : [ {
     "fieldName" : "options",
-    "fieldExtractor" : "OptionsUriExtractor",
+    "fieldExtractor" : "PropertiesPathUriExtractor",
     "isInput" : false
   } ]
 }, {
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
index b541efc11..212ed74aa 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
@@ -189,6 +189,19 @@ object PrivilegesBuilder {
               LOG.debug(databaseDesc.error(plan, e))
           }
         }
+        desc.uriDescs.foreach { ud =>
+          try {
+            val uris = ud.extract(plan)
+            if (ud.isInput) {
+              inputObjs ++= uris.map(PrivilegeObject(_))
+            } else {
+              outputObjs ++= uris.map(PrivilegeObject(_))
+            }
+          } catch {
+            case e: Exception =>
+              LOG.debug(ud.error(plan, e))
+          }
+        }
         desc.operationType
 
       case classname if TABLE_COMMAND_SPECS.contains(classname) =>
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/CommandSpec.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/CommandSpec.scala
index 14f3719b8..c4fd721ca 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/CommandSpec.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/CommandSpec.scala
@@ -57,7 +57,8 @@ trait CommandSpecs[T <: CommandSpec] {
 case class DatabaseCommandSpec(
     classname: String,
     databaseDescs: Seq[DatabaseDesc],
-    opType: String = "QUERY") extends CommandSpec {}
+    opType: String = OperationType.QUERY.toString,
+    uriDescs: Seq[UriDesc] = Nil) extends CommandSpec {}
 
 /**
  * A specification describe a function command
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
index 46fd4d87a..b43d27057 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
@@ -33,7 +33,11 @@ object URIExtractor {
  */
 class StringURIExtractor extends URIExtractor {
   override def apply(v1: AnyRef): Seq[Uri] = {
-    Seq(Uri(v1.asInstanceOf[String]))
+    v1 match {
+      case uriPath: String => Seq(Uri(uriPath))
+      case Some(uriPath: String) => Seq(Uri(uriPath))
+      case _ => Nil
+    }
   }
 }
 
@@ -49,12 +53,18 @@ class CatalogStorageFormatURIExtractor extends URIExtractor 
{
   }
 }
 
-class OptionsUriExtractor extends URIExtractor {
+class PropertiesPathUriExtractor extends URIExtractor {
   override def apply(v1: AnyRef): Seq[Uri] = {
     v1.asInstanceOf[Map[String, String]].get("path").map(Uri).toSeq
   }
 }
 
+class PropertiesLocationUriExtractor extends URIExtractor {
+  override def apply(v1: AnyRef): Seq[Uri] = {
+    v1.asInstanceOf[Map[String, String]].get("location").map(Uri).toSeq
+  }
+}
+
 class BaseRelationFileIndexURIExtractor extends URIExtractor {
   override def apply(v1: AnyRef): Seq[Uri] = {
     v1 match {
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index 5d87f91f9..a331faf61 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -1292,16 +1292,25 @@ class InMemoryPrivilegeBuilderSuite extends 
PrivilegesBuilderSuite {
       "org.apache.spark.sql.execution.command.AlterDatabaseSetLocationCommand")
     assert(operationType === ALTERDATABASE_LOCATION)
     assert(in.isEmpty)
-    assert(out.size === 1)
-    val po = out.head
-    assert(po.actionType === PrivilegeObjectActionType.OTHER)
-    assert(po.privilegeObjectType === PrivilegeObjectType.DATABASE)
-    assert(po.catalog.isEmpty)
-    assertEqualsIgnoreCase(defaultDb)(po.dbname)
-    assertEqualsIgnoreCase(defaultDb)(po.objectName)
-    assert(po.columns.isEmpty)
-    val accessType = ranger.AccessType(po, operationType, isInput = false)
-    assert(accessType === AccessType.ALTER)
+    assert(out.size === 2)
+    val po0 = out.head
+    assert(po0.actionType === PrivilegeObjectActionType.OTHER)
+    assert(po0.privilegeObjectType === PrivilegeObjectType.DATABASE)
+    assert(po0.catalog.isEmpty)
+    assertEqualsIgnoreCase(defaultDb)(po0.dbname)
+    assertEqualsIgnoreCase(defaultDb)(po0.objectName)
+    assert(po0.columns.isEmpty)
+    val accessType0 = ranger.AccessType(po0, operationType, isInput = false)
+    assert(accessType0 === AccessType.ALTER)
+
+    val po1 = out.last
+    assert(po1.actionType === PrivilegeObjectActionType.OTHER)
+    assert(po1.catalog.isEmpty)
+    assertEqualsIgnoreCase(defaultDb)(po0.dbname)
+    assertEqualsIgnoreCase(defaultDb)(po0.objectName)
+    assert(po1.columns.isEmpty)
+    val accessType1 = ranger.AccessType(po1, operationType, isInput = false)
+    assert(accessType1 === AccessType.WRITE)
   }
 
   test("CreateDataSourceTableAsSelectCommand") {
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2CommandsPrivilegesSuite.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2CommandsPrivilegesSuite.scala
index 149c9ba8f..62b7939b3 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2CommandsPrivilegesSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2CommandsPrivilegesSuite.scala
@@ -738,16 +738,25 @@ abstract class V2CommandsPrivilegesSuite extends 
PrivilegesBuilderSuite {
       "org.apache.spark.sql.catalyst.plans.logical.SetNamespaceLocation")
     assert(operationType === ALTERDATABASE_LOCATION)
     assert(in.isEmpty)
-    assert(out.size === 1)
-    val po = out.head
-    assert(po.actionType === PrivilegeObjectActionType.OTHER)
-    assert(po.privilegeObjectType === PrivilegeObjectType.DATABASE)
-    assert(po.catalog.get === sparkSessionCatalogName)
-    assertEqualsIgnoreCase(defaultDb)(po.dbname)
-    assertEqualsIgnoreCase(defaultDb)(po.objectName)
-    assert(po.columns.isEmpty)
-    val accessType = ranger.AccessType(po, operationType, isInput = false)
-    assert(accessType === AccessType.ALTER)
+    assert(out.size === 2)
+    val po0 = out.head
+    assert(po0.actionType === PrivilegeObjectActionType.OTHER)
+    assert(po0.privilegeObjectType === PrivilegeObjectType.DATABASE)
+    assert(po0.catalog.get === sparkSessionCatalogName)
+    assertEqualsIgnoreCase(defaultDb)(po0.dbname)
+    assertEqualsIgnoreCase(defaultDb)(po0.objectName)
+    assert(po0.columns.isEmpty)
+    val accessType0 = ranger.AccessType(po0, operationType, isInput = false)
+    assert(accessType0 === AccessType.ALTER)
+
+    val po1 = out.last
+    assert(po1.actionType === PrivilegeObjectActionType.OTHER)
+    assert(po1.catalog.isEmpty)
+    assertEqualsIgnoreCase(defaultDb)(po0.dbname)
+    assertEqualsIgnoreCase(defaultDb)(po0.objectName)
+    assert(po1.columns.isEmpty)
+    val accessType1 = ranger.AccessType(po1, operationType, isInput = false)
+    assert(accessType1 === AccessType.WRITE)
   }
 
   test("DescribeNamespace") {
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
index 4436d9566..ebaddf622 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
@@ -22,6 +22,22 @@ import org.apache.kyuubi.plugin.spark.authz.serde._
 
 object DatabaseCommands extends CommandSpecs[DatabaseCommandSpec] {
 
+  val CreateDatabaseCommand = {
+    DatabaseCommandSpec(
+      "org.apache.spark.sql.execution.command.CreateDatabaseCommand",
+      Seq(DatabaseDesc("databaseName", classOf[StringDatabaseExtractor])),
+      CREATEDATABASE,
+      Seq(UriDesc("path", classOf[StringURIExtractor])))
+  }
+
+  val AlterDatabaseSetLocationCommand = {
+    DatabaseCommandSpec(
+      "org.apache.spark.sql.execution.command.AlterDatabaseSetLocationCommand",
+      Seq(DatabaseDesc("databaseName", classOf[StringDatabaseExtractor])),
+      ALTERDATABASE_LOCATION,
+      Seq(UriDesc("location", classOf[StringURIExtractor])))
+  }
+
   val AlterDatabaseProperties = {
     DatabaseCommandSpec(
       "org.apache.spark.sql.execution.command.AlterDatabasePropertiesCommand",
@@ -47,7 +63,8 @@ object DatabaseCommands extends 
CommandSpecs[DatabaseCommandSpec] {
     DatabaseCommandSpec(
       "org.apache.spark.sql.catalyst.plans.logical.SetNamespaceLocation",
       Seq(DatabaseDesc("namespace", 
classOf[ResolvedNamespaceDatabaseExtractor])),
-      ALTERDATABASE_LOCATION)
+      ALTERDATABASE_LOCATION,
+      Seq(UriDesc("location", classOf[StringURIExtractor])))
   }
 
   val CreateNamespace = {
@@ -62,7 +79,8 @@ object DatabaseCommands extends 
CommandSpecs[DatabaseCommandSpec] {
     DatabaseCommandSpec(
       "org.apache.spark.sql.catalyst.plans.logical.CreateNamespace",
       Seq(databaseDesc1, databaseDesc2, databaseDesc3),
-      CREATEDATABASE)
+      CREATEDATABASE,
+      Seq(UriDesc("properties", classOf[PropertiesLocationUriExtractor])))
   }
 
   val DropNamespace = {
@@ -143,16 +161,12 @@ object DatabaseCommands extends 
CommandSpecs[DatabaseCommandSpec] {
 
   override def specs: Seq[DatabaseCommandSpec] = Seq(
     AlterDatabaseProperties,
-    AlterDatabaseProperties.copy(
-      classname = 
"org.apache.spark.sql.execution.command.AlterDatabaseSetLocationCommand",
-      opType = ALTERDATABASE_LOCATION),
-    AlterDatabaseProperties.copy(
-      classname = 
"org.apache.spark.sql.execution.command.CreateDatabaseCommand",
-      opType = CREATEDATABASE),
+    AlterDatabaseSetLocationCommand,
     AlterDatabaseProperties.copy(
       classname = "org.apache.spark.sql.execution.command.DropDatabaseCommand",
       opType = DROPDATABASE),
     AnalyzeTables,
+    CreateDatabaseCommand,
     CreateNamespace,
     CommentOnNamespace,
     DescribeDatabase,
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index 3bf863e75..9c264f705 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -559,7 +559,7 @@ object TableCommands extends CommandSpecs[TableCommandSpec] 
{
   val SaveIntoDataSourceCommand = {
     val cmd = 
"org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand"
     val queryDesc = queryQueryDesc
-    val uriDesc = UriDesc("options", classOf[OptionsUriExtractor])
+    val uriDesc = UriDesc("options", classOf[PropertiesPathUriExtractor])
     TableCommandSpec(cmd, Nil, queryDescs = Seq(queryDesc), uriDescs = 
Seq(uriDesc))
   }
 
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index ed89f3298..98c513c13 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -17,6 +17,7 @@
 
 package org.apache.kyuubi.plugin.spark.authz.ranger
 
+import java.lang.reflect.UndeclaredThrowableException
 import java.nio.file.Path
 
 import scala.util.Try
@@ -1172,4 +1173,29 @@ class HiveCatalogRangerSparkExtensionSuite extends 
RangerSparkExtensionSuite {
       }
     }
   }
+
+  test("CreateDatabaseCommand/AlterDatabaseSetLocationCommand") {
+    val db1 = "db1"
+    withSingleCallEnabled {
+      withTempDir { path1 =>
+        withTempDir { path2 =>
+          withCleanTmpResources(Seq((s"$db1", "database"))) {
+            interceptContains[AccessControlException](
+              doAs(someone, sql(s"CREATE DATABASE $db1 LOCATION '$path1'")))(
+              s"does not have [create] privilege on [$db1], " +
+                s"[write] privilege on [[$path1, $path1/]]")
+            doAs(admin, sql(s"CREATE DATABASE $db1 LOCATION '$path1'"))
+            interceptContains[AccessControlException](
+              doAs(someone, sql(s"ALTER DATABASE $db1 SET LOCATION 
'$path2'")))(
+              s"does not have [alter] privilege on [$db1], " +
+                s"[write] privilege on [[$path2, $path2/]]")
+            val e = intercept[UndeclaredThrowableException](
+              doAs(admin, sql(s"ALTER DATABASE $db1 SET LOCATION '$path2'")))
+            assert(e.getCause.getMessage ==
+              "Hive metastore does not support altering database location.")
+          }
+        }
+      }
+    }
+  }
 }

Reply via email to