This is an automated email from the ASF dual-hosted git repository.
yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 51dd31c53 [KYUUBI #5611][AUTHZ] Authz support path privilege for SaveIntoDataSourceCommand
51dd31c53 is described below
commit 51dd31c53722f91f0c47c97fbacfb139a097717e
Author: Angerszhuuuu <[email protected]>
AuthorDate: Fri Nov 3 10:37:31 2023 +0800
[KYUUBI #5611][AUTHZ] Authz support path privilege for SaveIntoDataSourceCommand
### _Why are the changes needed?_
To close #5611
Authz support path privilege for SaveIntoDataSourceCommand
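For context, the kind of write this covers: a batch save through a `CreatableRelationProvider`-style source (such as the `console` source used in the new test) is planned as `SaveIntoDataSourceCommand`, and `save(path)` places the target location in the command's `options` map under the `path` key. A minimal sketch; the output path is hypothetical:

```scala
// Sketch only: this write is planned as SaveIntoDataSourceCommand, and the
// target location ends up in options("path"), which the authz plugin can now
// check as a write URI.
val df = spark.sql("SELECT 1 AS id, 'Tony' AS name")
df.write.format("console").save("/tmp/authz/save_into_datasource") // hypothetical path
```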
### _How was this patch tested?_
- [x] Add some test cases that check the changes thoroughly including negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
### _Was this patch authored or co-authored using generative AI tooling?_
No
Closes #5613 from AngersZhuuuu/KYUUBI-5611.
Closes #5611
24081b1bc [Angerszhuuuu] [KYUUBI #5611][AUTHZ] Authz support path privilege for SaveIntoDataSourceCommand
Authored-by: Angerszhuuuu <[email protected]>
Signed-off-by: Kent Yao <[email protected]>
---
.../org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor | 1 +
.../src/main/resources/table_command_spec.json | 6 +++++-
.../kyuubi/plugin/spark/authz/serde/uriExtractors.scala | 6 ++++++
.../apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala | 3 ++-
.../plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala | 11 +++++++++++
5 files changed, 25 insertions(+), 2 deletions(-)
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
index e78da29df..f0038e75b 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
@@ -16,4 +16,5 @@
#
org.apache.kyuubi.plugin.spark.authz.serde.CatalogStorageFormatURIExtractor
+org.apache.kyuubi.plugin.spark.authz.serde.OptionsUriExtractor
org.apache.kyuubi.plugin.spark.authz.serde.StringURIExtractor
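The service entry above registers the new extractor for runtime discovery, presumably via a standard `java.util.ServiceLoader` lookup; a rough sketch of what that discovery looks like (illustrative, not the plugin's exact loading code):

```scala
import java.util.ServiceLoader

import org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor

// Illustrative only: enumerate the URIExtractor implementations visible on the
// classpath; after this change the output should include OptionsUriExtractor.
val it = ServiceLoader.load(classOf[URIExtractor]).iterator()
while (it.hasNext) {
  println(it.next().getClass.getName)
}
```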
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index c2368c2f4..581f33506 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -1344,7 +1344,11 @@
"fieldName" : "query",
"fieldExtractor" : "LogicalPlanQueryExtractor"
} ],
- "uriDescs" : [ ]
+ "uriDescs" : [ {
+ "fieldName" : "options",
+ "fieldExtractor" : "OptionsUriExtractor",
+ "isInput" : false
+ } ]
}, {
"classname" :
"org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand",
"tableDescs" : [ {
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
index 7feca1511..418c5a0a0 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
@@ -41,3 +41,9 @@ class CatalogStorageFormatURIExtractor extends URIExtractor {
    v1.asInstanceOf[CatalogStorageFormat].locationUri.map(uri => Uri(uri.getPath)).toSeq
  }
}
+
+class OptionsUriExtractor extends URIExtractor {
+  override def apply(v1: AnyRef): Seq[Uri] = {
+    v1.asInstanceOf[Map[String, String]].get("path").map(Uri).toSeq
+  }
+}
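A quick usage sketch of the extractor added above: only the `path` entry of the options map, when present, is turned into a `Uri`; all other options are ignored. The option values below are hypothetical:

```scala
// Hypothetical inputs, exercising the behavior defined above.
val extractor = new OptionsUriExtractor

// A "path" option yields a single Uri for the privilege check.
extractor(Map("path" -> "/tmp/authz/output")) // Seq(Uri("/tmp/authz/output"))

// Without a "path" option there is nothing to authorize at the URI level.
extractor(Map("mode" -> "append")) // Seq.empty
```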
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index b69f08bcf..7e42b02bc 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -559,7 +559,8 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
  val SaveIntoDataSourceCommand = {
    val cmd =
      "org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand"
    val queryDesc = queryQueryDesc
-    TableCommandSpec(cmd, Nil, queryDescs = Seq(queryDesc))
+    val uriDesc = UriDesc("options", classOf[OptionsUriExtractor])
+    TableCommandSpec(cmd, Nil, queryDescs = Seq(queryDesc), uriDescs = Seq(uriDesc))
  }
  val InsertIntoHadoopFsRelationCommand = {
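Rough illustration of what the new `UriDesc("options", classOf[OptionsUriExtractor])` entry amounts to at check time: the field named by the spec is read off the command instance and handed to the extractor (assumed wiring, simplified via plain reflection; not the plugin's actual code path):

```scala
import org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand

import org.apache.kyuubi.plugin.spark.authz.serde.{OptionsUriExtractor, Uri}

// Simplified sketch: resolve the command field named by the spec ("options")
// and feed its value to the configured extractor to get the URIs to authorize.
def urisOf(cmd: SaveIntoDataSourceCommand): Seq[Uri] = {
  val options = cmd.getClass.getMethod("options").invoke(cmd)
  new OptionsUriExtractor().apply(options)
}
```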
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index e39a953de..1f1b42b0f 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -1085,4 +1085,15 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
      }
    }
  }
+  test("SaveIntoDataSourceCommand") {
+    withTempDir { path =>
+      withSingleCallEnabled {
+        val df = sql("SELECT 1 as id, 'Tony' as name")
+        interceptContains[AccessControlException](doAs(
+          someone,
+          df.write.format("console").save(path.toString)))(
+          s"does not have [select] privilege on [[$path, $path/]]")
+      }
+    }
+  }
}