This is an automated email from the ASF dual-hosted git repository.
yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new ebc98fe5c [KYUUBI #5575][KYUUBI #5611][AUTHZ] Check path privilege should use correct action type
ebc98fe5c is described below
commit ebc98fe5c67ea8d11426ae87e56c554ac89a63dc
Author: Angerszhuuuu <[email protected]>
AuthorDate: Fri Nov 3 22:06:46 2023 +0800
[KYUUBI #5575][KYUUBI #5611][AUTHZ] Check path privilege should use correct action type
### _Why are the changes needed?_
To close #5575 and #5611.
Path privilege checks should use the correct action type, derived from the command, instead of always defaulting to `OTHER`.
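For illustration, here is a minimal self-contained Scala sketch of the idea. The `UriRule`, `InsertDirCmd`, and action-type names below are simplified stand-ins, not the plugin's actual `UriDesc`/`ActionTypeDesc` API: a URI descriptor may carry an optional action-type extractor, and the builder falls back to `OTHER` only when no extractor is configured.

```scala
// Illustrative sketch only: UriRule and InsertDirCmd are simplified
// stand-ins, not the plugin's real UriDesc/ActionTypeDesc classes.
object ActionTypeSketch extends App {
  sealed trait ActionType
  case object OTHER extends ActionType
  case object INSERT extends ActionType
  case object INSERT_OVERWRITE extends ActionType

  // A URI rule: how to pull the path out of a command, plus an optional
  // extractor that refines the action type (mirrors the new actionTypeDesc).
  case class UriRule(
      extractUri: Any => String,
      extractAction: Option[Any => ActionType] = None)

  // Stand-in for a write command such as InsertIntoDataSourceDirCommand.
  case class InsertDirCmd(path: String, overwrite: Boolean)

  val rule = UriRule(
    extractUri = { case InsertDirCmd(p, _) => p },
    extractAction = Some((c: Any) => c match {
      case InsertDirCmd(_, ow) => if (ow) INSERT_OVERWRITE else INSERT
    }))

  val cmd = InsertDirCmd("/tmp/out", overwrite = true)
  // Before this fix the action type for URIs was hard-coded to OTHER (so
  // Ranger mapped write paths to SELECT); now it is derived from the command
  // when a descriptor is present, and only falls back to OTHER otherwise.
  val action = rule.extractAction.map(f => f(cmd)).getOrElse(OTHER)
  println(s"${rule.extractUri(cmd)} -> $action") // /tmp/out -> INSERT_OVERWRITE
}
```

This mirrors the change in `PrivilegesBuilder` below, where `ud.actionTypeDesc.map(_.extract(plan)).getOrElse(OTHER)` replaces the hard-coded `PrivilegeObjectActionType.OTHER`.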
### _How was this patch tested?_
- [x] Add some test cases that check the changes thoroughly including negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
### _Was this patch authored or co-authored using generative AI tooling?_
No
Closes #5618 from AngersZhuuuu/KYUUBI-5575-5611-FOLLOWUP.
Closes #5575
Closes #5611
07049a5f4 [Angerszhuuuu] Update PrivilegesBuilderSuite.scala
0ae0b2b6d [Angerszhuuuu] [KYUUBI #5575][KYUUBI #5611][AUTHZ] Check path privilege should use correct action type
Authored-by: Angerszhuuuu <[email protected]>
Signed-off-by: Kent Yao <[email protected]>
---
.../src/main/resources/table_command_spec.json | 17 +++++++++++++++++
.../kyuubi/plugin/spark/authz/PrivilegeObject.scala | 6 ++++--
.../kyuubi/plugin/spark/authz/PrivilegesBuilder.scala | 5 +++--
.../kyuubi/plugin/spark/authz/serde/Descriptor.scala | 1 +
.../plugin/spark/authz/PrivilegesBuilderSuite.scala | 12 ++++++------
.../kyuubi/plugin/spark/authz/gen/TableCommands.scala | 12 ++++++++++--
.../spark/authz/ranger/RangerSparkExtensionSuite.scala | 13 +++++++------
7 files changed, 48 insertions(+), 18 deletions(-)
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index 581f33506..5c2dcd09b 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -1100,6 +1100,11 @@
"uriDescs" : [ {
"fieldName" : "storage",
"fieldExtractor" : "CatalogStorageFormatURIExtractor",
+ "actionTypeDesc" : {
+ "fieldName" : "overwrite",
+ "fieldExtractor" : "OverwriteOrInsertActionTypeExtractor",
+ "actionType" : null
+ },
"isInput" : false
} ]
}, {
@@ -1347,6 +1352,11 @@
"uriDescs" : [ {
"fieldName" : "options",
"fieldExtractor" : "OptionsUriExtractor",
+ "actionTypeDesc" : {
+ "fieldName" : "mode",
+ "fieldExtractor" : "SaveModeActionTypeExtractor",
+ "actionType" : null
+ },
"isInput" : false
} ]
}, {
@@ -1381,6 +1391,11 @@
"uriDescs" : [ {
"fieldName" : "storage",
"fieldExtractor" : "CatalogStorageFormatURIExtractor",
+ "actionTypeDesc" : {
+ "fieldName" : "overwrite",
+ "fieldExtractor" : "OverwriteOrInsertActionTypeExtractor",
+ "actionType" : null
+ },
"isInput" : false
} ]
}, {
@@ -1654,6 +1669,7 @@
"uriDescs" : [ {
"fieldName" : "path",
"fieldExtractor" : "StringURIExtractor",
+ "actionTypeDesc" : null,
"isInput" : false
} ]
}, {
@@ -1679,6 +1695,7 @@
"uriDescs" : [ {
"fieldName" : "path",
"fieldExtractor" : "StringURIExtractor",
+ "actionTypeDesc" : null,
"isInput" : true
} ]
}, {
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObject.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObject.scala
index 0fe145b4a..fa0deeaa2 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObject.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObject.scala
@@ -88,14 +88,16 @@ object PrivilegeObject {
) // TODO: Support catalog for function
}
- def apply(uri: Uri): PrivilegeObject = {
+ def apply(
+ uri: Uri,
+ actionType: PrivilegeObjectActionType): PrivilegeObject = {
val privilegeObjectType = Option(new URI(uri.path).getScheme) match {
case Some("file") => LOCAL_URI
case _ => DFS_URL
}
new PrivilegeObject(
privilegeObjectType,
- PrivilegeObjectActionType.OTHER,
+ actionType,
uri.path,
null,
Nil,
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
index 846f2b791..3d872690f 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
@@ -196,10 +196,11 @@ object PrivilegesBuilder {
spec.uriDescs.foreach { ud =>
try {
val uris = ud.extract(plan)
+ val actionType = ud.actionTypeDesc.map(_.extract(plan)).getOrElse(OTHER)
if (ud.isInput) {
- inputObjs ++= uris.map(PrivilegeObject(_))
+ inputObjs ++= uris.map(PrivilegeObject(_, actionType))
} else {
- outputObjs ++= uris.map(PrivilegeObject(_))
+ outputObjs ++= uris.map(PrivilegeObject(_, actionType))
}
} catch {
case e: Exception =>
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
index 5c22975a9..cf295d0c5 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
@@ -317,6 +317,7 @@ case class ScanDesc(
case class UriDesc(
fieldName: String,
fieldExtractor: String,
+ actionTypeDesc: Option[ActionTypeDesc] = None,
isInput: Boolean = false) extends Descriptor {
override def extract(v: AnyRef): Seq[Uri] = {
val uriVal = invokeAs[AnyRef](v, fieldName)
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index 4cb67890e..fbb50c242 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -1469,13 +1469,13 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
assert(out.size == 1)
val po1 = out.head
- assert(po1.actionType === PrivilegeObjectActionType.OTHER)
+ assert(po1.actionType === PrivilegeObjectActionType.INSERT_OVERWRITE)
assert(po1.privilegeObjectType === PrivilegeObjectType.DFS_URL)
assert(po1.dbname === directory.path)
assert(po1.objectName === null)
assert(po1.columns === Seq.empty)
val accessType1 = ranger.AccessType(po1, operationType, isInput = true)
- assert(accessType1 == AccessType.SELECT)
+ assert(accessType1 == AccessType.UPDATE)
}
test("InsertIntoDataSourceCommand") {
@@ -1601,13 +1601,13 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
assert(out.size == 1)
val po1 = out.head
- assert(po1.actionType === PrivilegeObjectActionType.OTHER)
+ assert(po1.actionType === PrivilegeObjectActionType.INSERT_OVERWRITE)
assert(po1.privilegeObjectType === PrivilegeObjectType.DFS_URL)
assert(po1.dbname === directory.path)
assert(po1.objectName === null)
assert(po1.columns === Seq.empty)
val accessType1 = ranger.AccessType(po1, operationType, isInput = true)
- assert(accessType1 == AccessType.SELECT)
+ assert(accessType1 == AccessType.UPDATE)
}
test("InsertIntoHiveDirCommand") {
@@ -1634,13 +1634,13 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
assert(out.size == 1)
val po1 = out.head
- assert(po1.actionType === PrivilegeObjectActionType.OTHER)
+ assert(po1.actionType === PrivilegeObjectActionType.INSERT_OVERWRITE)
assert(po1.privilegeObjectType === PrivilegeObjectType.DFS_URL)
assert(po1.dbname === directory.path)
assert(po1.objectName === null)
assert(po1.columns === Seq.empty)
val accessType1 = ranger.AccessType(po1, operationType, isInput = true)
- assert(accessType1 == AccessType.SELECT)
+ assert(accessType1 == AccessType.UPDATE)
}
test("InsertIntoHiveTableCommand") {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index 7e42b02bc..6007d689e 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -552,14 +552,22 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
val InsertIntoDataSourceDir = {
val cmd = "org.apache.spark.sql.execution.command.InsertIntoDataSourceDirCommand"
val queryDesc = queryQueryDesc
- val uriDesc = UriDesc("storage", classOf[CatalogStorageFormatURIExtractor])
+ val actionTypeDesc = overwriteActionTypeDesc
+ val uriDesc = UriDesc(
+ "storage",
+ classOf[CatalogStorageFormatURIExtractor],
+ actionTypeDesc = Some(actionTypeDesc))
TableCommandSpec(cmd, Nil, queryDescs = Seq(queryDesc), uriDescs = Seq(uriDesc))
}
val SaveIntoDataSourceCommand = {
val cmd = "org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand"
val queryDesc = queryQueryDesc
- val uriDesc = UriDesc("options", classOf[OptionsUriExtractor])
+ val actionTypeDesc = ActionTypeDesc("mode", classOf[SaveModeActionTypeExtractor])
+ val uriDesc = UriDesc(
+ "options",
+ classOf[OptionsUriExtractor],
+ actionTypeDesc = Some(actionTypeDesc))
TableCommandSpec(cmd, Nil, queryDescs = Seq(queryDesc), uriDescs = Seq(uriDesc))
}
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index 1f1b42b0f..3bb8379bc 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -1058,8 +1058,8 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
|INSERT OVERWRITE DIRECTORY '$path'
|ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
|SELECT * FROM $db1.$table1""".stripMargin)))(
- s"does not have [select] privilege on
[$db1/$table1/id,$db1/$table1/scope," +
- s"[$path, $path/]]")
+ s"does not have [select] privilege on
[$db1/$table1/id,$db1/$table1/scope], " +
+ s"[update] privilege on [[$path, $path/]]")
}
}
}
@@ -1079,20 +1079,21 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
|INSERT OVERWRITE DIRECTORY '$path'
|USING parquet
|SELECT * FROM $db1.$table1""".stripMargin)))(
- s"does not have [select] privilege on
[$db1/$table1/id,$db1/$table1/scope," +
- s"[$path, $path/]]")
+ s"does not have [select] privilege on
[$db1/$table1/id,$db1/$table1/scope], " +
+ s"[update] privilege on [[$path, $path/]]")
}
}
}
}
+
test("SaveIntoDataSourceCommand") {
withTempDir { path =>
withSingleCallEnabled {
val df = sql("SELECT 1 as id, 'Tony' as name")
interceptContains[AccessControlException](doAs(
someone,
- df.write.format("console").save(path.toString)))(
- s"does not have [select] privilege on [[$path, $path/]]")
+ df.write.format("console").mode("append").save(path.toString)))(
+ s"does not have [update] privilege on [[$path, $path/]]")
}
}
}