This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 85af9d921 [KYUUBI #5638][AUTHZ] URI related privileges shall be
READ/WRITE
85af9d921 is described below
commit 85af9d92187aa0e4f1f599a21dea35fc5999f0dd
Author: Kent Yao <[email protected]>
AuthorDate: Tue Nov 7 18:58:27 2023 +0800
[KYUUBI #5638][AUTHZ] URI related privileges shall be READ/WRITE
### _Why are the changes needed?_
This PR fixes #5638, in which URI related privileges were incorrectly mapped
to SELECT and UPDATE; they are now mapped to READ and WRITE.
### _How was this patch tested?_
- [x] Add some test cases that check the changes thoroughly including
negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [ ] [Run
test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests)
locally before making a pull request
### _Was this patch authored or co-authored using generative AI tooling?_
no
Closes #5639 from yaooqinn/bug.
Closes #5638
9f2b11b20 [Kent Yao] URI related privileges shall be READ/WRITE
e3856a64c [Kent Yao] URI related privileges shall be READ/WRITE
Authored-by: Kent Yao <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
---
.../kyuubi/plugin/spark/authz/ObjectType.scala | 2 +-
.../plugin/spark/authz/PrivilegeObject.scala | 2 +-
.../plugin/spark/authz/PrivilegeObjectType.scala | 2 +-
.../plugin/spark/authz/ranger/AccessType.scala | 8 ++++---
.../plugin/spark/authz/serde/Descriptor.scala | 6 ++---
.../spark/authz/PrivilegesBuilderSuite.scala | 27 +++++++++++-----------
.../HudiCatalogRangerSparkExtensionSuite.scala | 12 +++++-----
.../authz/ranger/RangerSparkExtensionSuite.scala | 12 +++++-----
8 files changed, 37 insertions(+), 34 deletions(-)
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ObjectType.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ObjectType.scala
index fe53440c1..c8662f29d 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ObjectType.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ObjectType.scala
@@ -33,7 +33,7 @@ object ObjectType extends Enumeration {
case PrivilegeObjectType.TABLE_OR_VIEW if
opType.toString.contains("VIEW") => VIEW
case PrivilegeObjectType.TABLE_OR_VIEW => TABLE
case PrivilegeObjectType.FUNCTION => FUNCTION
- case PrivilegeObjectType.DFS_URL | PrivilegeObjectType.LOCAL_URI => URI
+ case PrivilegeObjectType.DFS_URI | PrivilegeObjectType.LOCAL_URI => URI
}
}
}
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObject.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObject.scala
index fa0deeaa2..2e391790e 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObject.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObject.scala
@@ -93,7 +93,7 @@ object PrivilegeObject {
actionType: PrivilegeObjectActionType): PrivilegeObject = {
val privilegeObjectType = Option(new URI(uri.path).getScheme) match {
case Some("file") => LOCAL_URI
- case _ => DFS_URL
+ case _ => DFS_URI
}
new PrivilegeObject(
privilegeObjectType,
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
index 4020392f2..28b9588ea 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegeObjectType.scala
@@ -20,5 +20,5 @@ package org.apache.kyuubi.plugin.spark.authz
object PrivilegeObjectType extends Enumeration {
type PrivilegeObjectType = Value
- val DATABASE, TABLE_OR_VIEW, FUNCTION, LOCAL_URI, DFS_URL = Value
+ val DATABASE, TABLE_OR_VIEW, FUNCTION, LOCAL_URI, DFS_URI = Value
}
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessType.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessType.scala
index 7f1ddb68e..ca509154b 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessType.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/AccessType.scala
@@ -28,6 +28,11 @@ object AccessType extends Enumeration {
val NONE, CREATE, ALTER, DROP, SELECT, UPDATE, USE, READ, WRITE, ALL, ADMIN,
INDEX = Value
def apply(obj: PrivilegeObject, opType: OperationType, isInput: Boolean):
AccessType = {
+ if (obj.privilegeObjectType == DFS_URI || obj.privilegeObjectType ==
LOCAL_URI) {
+ // This is equivalent to ObjectType.URI
+ return if (isInput) READ else WRITE
+ }
+
obj.actionType match {
case PrivilegeObjectActionType.OTHER => opType match {
case CREATEDATABASE if obj.privilegeObjectType == DATABASE => CREATE
@@ -35,9 +40,6 @@ object AccessType extends Enumeration {
case CREATETABLE | CREATEVIEW | CREATETABLE_AS_SELECT
if obj.privilegeObjectType == TABLE_OR_VIEW =>
if (isInput) SELECT else CREATE
- case CREATETABLE
- if obj.privilegeObjectType == DFS_URL || obj.privilegeObjectType
== LOCAL_URI =>
- if (isInput) SELECT else CREATE
case ALTERDATABASE |
ALTERDATABASE_LOCATION |
ALTERTABLE_ADDCOLS |
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
index cf295d0c5..5d0d77fd6 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Descriptor.scala
@@ -308,10 +308,10 @@ case class ScanDesc(
}
/**
- * Function Descriptor
+ * URI Descriptor
*
- * @param fieldName the field name or method name of this function field
- * @param fieldExtractor the key of a [[FunctionExtractor]] instance
+ * @param fieldName the field name or method name of this uri field
+ * @param fieldExtractor the key of a [[URIExtractor]] instance
* @param isInput read or write
*/
case class UriDesc(
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index d8ae3d244..6f0a0ae6e 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -1433,11 +1433,13 @@ class HiveCatalogPrivilegeBuilderSuite extends
PrivilegesBuilderSuite {
assert(in.size === 1)
val po0 = in.head
assert(po0.actionType === PrivilegeObjectActionType.OTHER)
- assert(po0.privilegeObjectType === PrivilegeObjectType.DFS_URL)
+ assert(po0.privilegeObjectType === PrivilegeObjectType.DFS_URI)
assert(po0.dbname === dataPath)
assert(po0.objectName === null)
assert(po0.columns.isEmpty)
checkTableOwner(po0)
+ val accessType0 = ranger.AccessType(po0, operationType, isInput = true)
+ assert(accessType0 === AccessType.READ)
assert(out.size === 1)
val po1 = out.head
@@ -1447,8 +1449,8 @@ class HiveCatalogPrivilegeBuilderSuite extends
PrivilegesBuilderSuite {
assert(po1.objectName equalsIgnoreCase tableName.split("\\.").last)
assert(po1.columns.isEmpty)
checkTableOwner(po1)
- val accessType0 = ranger.AccessType(po1, operationType, isInput = false)
- assert(accessType0 === AccessType.UPDATE)
+ val accessType1 = ranger.AccessType(po1, operationType, isInput = false)
+ assert(accessType1 === AccessType.UPDATE)
}
}
@@ -1477,12 +1479,12 @@ class HiveCatalogPrivilegeBuilderSuite extends
PrivilegesBuilderSuite {
assert(out.size == 1)
val po1 = out.head
assert(po1.actionType === PrivilegeObjectActionType.INSERT_OVERWRITE)
- assert(po1.privilegeObjectType === PrivilegeObjectType.DFS_URL)
+ assert(po1.privilegeObjectType === PrivilegeObjectType.DFS_URI)
assert(po1.dbname === directory.path)
assert(po1.objectName === null)
assert(po1.columns === Seq.empty)
- val accessType1 = ranger.AccessType(po1, operationType, isInput = true)
- assert(accessType1 == AccessType.UPDATE)
+ val accessType1 = ranger.AccessType(po1, operationType, isInput = false)
+ assert(accessType1 == AccessType.WRITE)
}
test("InsertIntoDataSourceCommand") {
@@ -1538,7 +1540,6 @@ class HiveCatalogPrivilegeBuilderSuite extends
PrivilegesBuilderSuite {
checkTableOwner(po)
val accessType = ranger.AccessType(po, operationType, isInput = false)
assert(accessType === AccessType.UPDATE)
-
}
}
}
@@ -1609,12 +1610,12 @@ class HiveCatalogPrivilegeBuilderSuite extends
PrivilegesBuilderSuite {
assert(out.size == 1)
val po1 = out.head
assert(po1.actionType === PrivilegeObjectActionType.INSERT_OVERWRITE)
- assert(po1.privilegeObjectType === PrivilegeObjectType.DFS_URL)
+ assert(po1.privilegeObjectType === PrivilegeObjectType.DFS_URI)
assert(po1.dbname === directory.path)
assert(po1.objectName === null)
assert(po1.columns === Seq.empty)
- val accessType1 = ranger.AccessType(po1, operationType, isInput = true)
- assert(accessType1 == AccessType.UPDATE)
+ val accessType1 = ranger.AccessType(po1, operationType, isInput = false)
+ assert(accessType1 == AccessType.WRITE)
}
test("InsertIntoHiveDirCommand") {
@@ -1642,12 +1643,12 @@ class HiveCatalogPrivilegeBuilderSuite extends
PrivilegesBuilderSuite {
assert(out.size == 1)
val po1 = out.head
assert(po1.actionType === PrivilegeObjectActionType.INSERT_OVERWRITE)
- assert(po1.privilegeObjectType === PrivilegeObjectType.DFS_URL)
+ assert(po1.privilegeObjectType === PrivilegeObjectType.DFS_URI)
assert(po1.dbname === directory.path)
assert(po1.objectName === null)
assert(po1.columns === Seq.empty)
- val accessType1 = ranger.AccessType(po1, operationType, isInput = true)
- assert(accessType1 == AccessType.UPDATE)
+ val accessType1 = ranger.AccessType(po1, operationType, isInput = false)
+ assert(accessType1 == AccessType.WRITE)
}
test("InsertIntoHiveTableCommand") {
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
index 8cff1698d..1022e97b9 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/HudiCatalogRangerSparkExtensionSuite.scala
@@ -333,34 +333,34 @@ class HudiCatalogRangerSparkExtensionSuite extends
RangerSparkExtensionSuite {
val compactOnPath = s"RUN COMPACTION ON '$path1'"
interceptContains[AccessControlException](
doAs(someone, sql(compactOnPath)))(
- s"does not have [create] privilege on [[$path1, $path1/]]")
+ s"does not have [write] privilege on [[$path1, $path1/]]")
val showCompactOnPath = s"SHOW COMPACTION ON '$path1'"
interceptContains[AccessControlException](
doAs(someone, sql(showCompactOnPath)))(
- s"does not have [select] privilege on [[$path1, $path1/]]")
+ s"does not have [read] privilege on [[$path1, $path1/]]")
val path2 = "file:///demo/test/hudi/path"
val compactOnPath2 = s"RUN COMPACTION ON '$path2'"
interceptContains[AccessControlException](
doAs(someone, sql(compactOnPath2)))(
- s"does not have [create] privilege on [[$path2, $path2/]]")
+ s"does not have [write] privilege on [[$path2, $path2/]]")
val showCompactOnPath2 = s"SHOW COMPACTION ON '$path2'"
interceptContains[AccessControlException](
doAs(someone, sql(showCompactOnPath2)))(
- s"does not have [select] privilege on [[$path2, $path2/]]")
+ s"does not have [read] privilege on [[$path2, $path2/]]")
val path3 = "hdfs://demo/test/hudi/path"
val compactOnPath3 = s"RUN COMPACTION ON '$path3'"
interceptContains[AccessControlException](
doAs(someone, sql(compactOnPath3)))(
- s"does not have [create] privilege on [[$path3, $path3/]]")
+ s"does not have [write] privilege on [[$path3, $path3/]]")
val showCompactOnPath3 = s"SHOW COMPACTION ON '$path3/'"
interceptContains[AccessControlException](
doAs(someone, sql(showCompactOnPath3)))(
- s"does not have [select] privilege on [[$path3, $path3/]]")
+ s"does not have [read] privilege on [[$path3, $path3/]]")
}
}
}
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index ca0d7abbb..291745a0d 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -1059,7 +1059,7 @@ class HiveCatalogRangerSparkExtensionSuite extends
RangerSparkExtensionSuite {
|ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
|SELECT * FROM $db1.$table1""".stripMargin)))(
s"does not have [select] privilege on
[$db1/$table1/id,$db1/$table1/scope], " +
- s"[update] privilege on [[$path, $path/]]")
+ s"[write] privilege on [[$path, $path/]]")
}
}
}
@@ -1080,7 +1080,7 @@ class HiveCatalogRangerSparkExtensionSuite extends
RangerSparkExtensionSuite {
|USING parquet
|SELECT * FROM $db1.$table1""".stripMargin)))(
s"does not have [select] privilege on
[$db1/$table1/id,$db1/$table1/scope], " +
- s"[update] privilege on [[$path, $path/]]")
+ s"[write] privilege on [[$path, $path/]]")
}
}
}
@@ -1093,7 +1093,7 @@ class HiveCatalogRangerSparkExtensionSuite extends
RangerSparkExtensionSuite {
interceptContains[AccessControlException](doAs(
someone,
df.write.format("console").mode("append").save(path.toString)))(
- s"does not have [update] privilege on [[$path, $path/]]")
+ s"does not have [write] privilege on [[$path, $path/]]")
}
}
}
@@ -1122,12 +1122,12 @@ class HiveCatalogRangerSparkExtensionSuite extends
RangerSparkExtensionSuite {
|USING parquet
|SELECT * FROM $db1.$table1""".stripMargin)))(
s"does not have [select] privilege on
[$db1/$table1/id,$db1/$table1/scope], " +
- s"[update] privilege on [[$path, $path/]]")
+ s"[write] privilege on [[$path, $path/]]")
doAs(admin, sql(s"SELECT * FROM
parquet.`$path`".stripMargin).explain(true))
interceptContains[AccessControlException](
doAs(someone, sql(s"SELECT * FROM
parquet.`$path`".stripMargin).explain(true)))(
- s"does not have [select] privilege on " +
+ s"does not have [read] privilege on " +
s"[[file:$path, file:$path/]]")
}
}
@@ -1149,7 +1149,7 @@ class HiveCatalogRangerSparkExtensionSuite extends
RangerSparkExtensionSuite {
doAs(admin, sql(loadDataSql).explain(true))
interceptContains[AccessControlException](
doAs(someone, sql(loadDataSql).explain(true)))(
- s"does not have [select] privilege on " +
+ s"does not have [read] privilege on " +
s"[[$path, $path/]]")
}
}