This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new eb84e1538 [KYUUBI #5652][AUTHZ] CreateTable related command support path privilege
eb84e1538 is described below
commit eb84e1538fa5bcaa809a61d2a1d442133317f570
Author: Angerszhuuuu <[email protected]>
AuthorDate: Fri Nov 10 15:37:55 2023 +0800
[KYUUBI #5652][AUTHZ] CreateTable related command support path privilege
### _Why are the changes needed?_
To close #5652
Make CreateTable-related commands support path privileges: when a create-table command specifies a `LOCATION`, the authz plugin now extracts the location URI and checks the write privilege on that path.
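As a minimal sketch of the effect (illustrative table and path names, not taken from this patch), a user holding only table privileges will now also need path privileges for statements such as:

```sql
-- With the authz plugin enabled, this statement now additionally requires
-- [write] privilege on the specified location (e.g. file:///tmp/external_t),
-- on top of the [create] privilege on default.external_t.
CREATE TABLE default.external_t (id INT) LOCATION '/tmp/external_t';
```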
### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
### _Was this patch authored or co-authored using generative AI tooling?_
No
Closes #5656 from AngersZhuuuu/KYUUBI-5652.
Closes #5652
e89d2083b [Angerszhuuuu] Update org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
002fe3bc2 [Angerszhuuuu] Update org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
6a31edcef [Angerszhuuuu] Merge branch 'master' into KYUUBI-5652
0d5b4fab2 [Angerszhuuuu] Update RangerSparkExtensionSuite.scala
3e686cf64 [Angerszhuuuu] Revert "Update extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala"
8b4cbdae7 [Angerszhuuuu] Update extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
1a9959c9a [Angerszhuuuu] Update RangerSparkExtensionSuite.scala
97e691290 [Angerszhuuuu] Update table_command_spec.json
4b9679966 [Angerszhuuuu] update
f34ebdbc0 [Angerszhuuuu] Update PrivilegesBuilder.scala
239da7f1e [Angerszhuuuu] Merge branch 'master' into KYUUBI-5652
b45ecfca1 [Angerszhuuuu] [KYUUBI #5652][AUTHZ] CreateTable related command support path privilege
Authored-by: Angerszhuuuu <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
---
...he.kyuubi.plugin.spark.authz.serde.URIExtractor | 2 +
.../src/main/resources/table_command_spec.json | 88 +++++++++++++++++++---
.../plugin/spark/authz/serde/uriExtractors.scala | 16 +++-
.../plugin/spark/authz/gen/TableCommands.scala | 37 +++++++--
.../authz/ranger/RangerSparkExtensionSuite.scala | 56 ++++++++++++++
5 files changed, 179 insertions(+), 20 deletions(-)
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
index 9daf156c6..d7c859c7a 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/META-INF/services/org.apache.kyuubi.plugin.spark.authz.serde.URIExtractor
@@ -17,8 +17,10 @@
org.apache.kyuubi.plugin.spark.authz.serde.BaseRelationFileIndexURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.CatalogStorageFormatURIExtractor
+org.apache.kyuubi.plugin.spark.authz.serde.CatalogTableURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.PartitionLocsSeqURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.PropertiesLocationUriExtractor
org.apache.kyuubi.plugin.spark.authz.serde.PropertiesPathUriExtractor
org.apache.kyuubi.plugin.spark.authz.serde.StringSeqURIExtractor
org.apache.kyuubi.plugin.spark.authz.serde.StringURIExtractor
+org.apache.kyuubi.plugin.spark.authz.serde.TableSpecURIExtractor
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index 272876d52..7b5ebad6c 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -148,7 +148,15 @@
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
- "uriDescs" : [ ]
+ "uriDescs" : [ {
+ "fieldName" : "tableSpec",
+ "fieldExtractor" : "TableSpecURIExtractor",
+ "isInput" : false
+ }, {
+ "fieldName" : "properties",
+ "fieldExtractor" : "PropertiesLocationUriExtractor",
+ "isInput" : false
+ } ]
}, {
"classname" :
"org.apache.spark.sql.catalyst.plans.logical.CreateTableAsSelect",
"tableDescs" : [ {
@@ -187,7 +195,15 @@
"fieldName" : "query",
"fieldExtractor" : "LogicalPlanQueryExtractor"
} ],
- "uriDescs" : [ ]
+ "uriDescs" : [ {
+ "fieldName" : "tableSpec",
+ "fieldExtractor" : "TableSpecURIExtractor",
+ "isInput" : false
+ }, {
+ "fieldName" : "properties",
+ "fieldExtractor" : "PropertiesLocationUriExtractor",
+ "isInput" : false
+ } ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.CreateV2Table",
"tableDescs" : [ {
@@ -205,7 +221,11 @@
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
- "uriDescs" : [ ]
+ "uriDescs" : [ {
+ "fieldName" : "properties",
+ "fieldExtractor" : "PropertiesLocationUriExtractor",
+ "isInput" : false
+ } ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.DeleteFromTable",
"tableDescs" : [ {
@@ -492,7 +512,15 @@
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
- "uriDescs" : [ ]
+ "uriDescs" : [ {
+ "fieldName" : "tableSpec",
+ "fieldExtractor" : "TableSpecURIExtractor",
+ "isInput" : false
+ }, {
+ "fieldName" : "properties",
+ "fieldExtractor" : "PropertiesLocationUriExtractor",
+ "isInput" : false
+ } ]
}, {
"classname" :
"org.apache.spark.sql.catalyst.plans.logical.ReplaceTableAsSelect",
"tableDescs" : [ {
@@ -531,7 +559,15 @@
"fieldName" : "query",
"fieldExtractor" : "LogicalPlanQueryExtractor"
} ],
- "uriDescs" : [ ]
+ "uriDescs" : [ {
+ "fieldName" : "tableSpec",
+ "fieldExtractor" : "TableSpecURIExtractor",
+ "isInput" : false
+ }, {
+ "fieldName" : "properties",
+ "fieldExtractor" : "PropertiesLocationUriExtractor",
+ "isInput" : false
+ } ]
}, {
"classname" :
"org.apache.spark.sql.catalyst.plans.logical.SetTableProperties",
"tableDescs" : [ {
@@ -1012,7 +1048,11 @@
"fieldName" : "query",
"fieldExtractor" : "LogicalPlanQueryExtractor"
} ],
- "uriDescs" : [ ]
+ "uriDescs" : [ {
+ "fieldName" : "table",
+ "fieldExtractor" : "CatalogTableURIExtractor",
+ "isInput" : false
+ } ]
}, {
"classname" :
"org.apache.spark.sql.execution.command.CreateDataSourceTableCommand",
"tableDescs" : [ {
@@ -1027,7 +1067,11 @@
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
- "uriDescs" : [ ]
+ "uriDescs" : [ {
+ "fieldName" : "table",
+ "fieldExtractor" : "CatalogTableURIExtractor",
+ "isInput" : false
+ } ]
}, {
"classname" : "org.apache.spark.sql.execution.command.CreateTableCommand",
"tableDescs" : [ {
@@ -1042,7 +1086,11 @@
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
- "uriDescs" : [ ]
+ "uriDescs" : [ {
+ "fieldName" : "table",
+ "fieldExtractor" : "CatalogTableURIExtractor",
+ "isInput" : false
+ } ]
}, {
"classname" :
"org.apache.spark.sql.execution.command.CreateTableLikeCommand",
"tableDescs" : [ {
@@ -1066,7 +1114,11 @@
} ],
"opType" : "CREATETABLE",
"queryDescs" : [ ],
- "uriDescs" : [ ]
+ "uriDescs" : [ {
+ "fieldName" : "fileFormat",
+ "fieldExtractor" : "CatalogStorageFormatURIExtractor",
+ "isInput" : false
+ } ]
}, {
"classname" : "org.apache.spark.sql.execution.command.CreateViewCommand",
"tableDescs" : [ {
@@ -1329,7 +1381,11 @@
"fieldName" : "query",
"fieldExtractor" : "LogicalPlanOptionQueryExtractor"
} ],
- "uriDescs" : [ ]
+ "uriDescs" : [ {
+ "fieldName" : "tableDesc",
+ "fieldExtractor" : "CatalogTableURIExtractor",
+ "isInput" : false
+ } ]
}, {
"classname" :
"org.apache.spark.sql.execution.datasources.CreateTempViewUsing",
"tableDescs" : [ ],
@@ -1431,7 +1487,11 @@
"fieldName" : "query",
"fieldExtractor" : "LogicalPlanQueryExtractor"
} ],
- "uriDescs" : [ ]
+ "uriDescs" : [ {
+ "fieldName" : "tableDesc",
+ "fieldExtractor" : "CatalogTableURIExtractor",
+ "isInput" : false
+ } ]
}, {
"classname" : "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand",
"tableDescs" : [ ],
@@ -1490,7 +1550,11 @@
"fieldName" : "query",
"fieldExtractor" : "LogicalPlanQueryExtractor"
} ],
- "uriDescs" : [ ]
+ "uriDescs" : [ {
+ "fieldName" : "tableDesc",
+ "fieldExtractor" : "CatalogTableURIExtractor",
+ "isInput" : false
+ } ]
}, {
"classname" : "org.apache.spark.sql.catalyst.plans.logical.Call",
"tableDescs" : [ {
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
index 6e0c232d2..77c7367fe 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/uriExtractors.scala
@@ -17,9 +17,11 @@
package org.apache.kyuubi.plugin.spark.authz.serde
-import org.apache.spark.sql.catalyst.catalog.CatalogStorageFormat
+import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable}
import org.apache.spark.sql.execution.datasources.HadoopFsRelation
+import org.apache.kyuubi.util.reflect.ReflectUtils.invokeAs
+
trait URIExtractor extends (AnyRef => Seq[Uri]) with Extractor
object URIExtractor {
@@ -74,6 +76,18 @@ class BaseRelationFileIndexURIExtractor extends URIExtractor {
}
}
+class TableSpecURIExtractor extends URIExtractor {
+ override def apply(v1: AnyRef): Seq[Uri] = {
+ new StringURIExtractor().apply(invokeAs[Option[String]](v1, "location"))
+ }
+}
+
+class CatalogTableURIExtractor extends URIExtractor {
+ override def apply(v1: AnyRef): Seq[Uri] = {
+    v1.asInstanceOf[CatalogTable].storage.locationUri.map(_.toString).map(Uri).toSeq
+ }
+}
+
class PartitionLocsSeqURIExtractor extends URIExtractor {
override def apply(v1: AnyRef): Seq[Uri] = {
v1.asInstanceOf[Seq[(_, Option[String])]].flatMap(_._2).map(Uri)
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index 6a3e1d75a..faf1b49ee 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -214,10 +214,14 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
"tableName",
classOf[IdentifierTableExtractor],
catalogDesc = Some(CatalogDesc()))
+ val uriDescs = Seq(
+ UriDesc("tableSpec", classOf[TableSpecURIExtractor]),
+ UriDesc("properties", classOf[PropertiesLocationUriExtractor]))
TableCommandSpec(
cmd,
Seq(resolvedIdentifierTableDesc, tableDesc, resolvedDbObjectNameDesc),
- CREATETABLE)
+ CREATETABLE,
+ uriDescs = uriDescs)
}
val CreateV2Table = {
@@ -226,7 +230,8 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
"tableName",
classOf[IdentifierTableExtractor],
catalogDesc = Some(CatalogDesc()))
- TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE)
+    val uriDescs = Seq(UriDesc("properties", classOf[PropertiesLocationUriExtractor]))
+ TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE, uriDescs = uriDescs)
}
val CreateTableAsSelectV2 = {
@@ -235,6 +240,9 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
"tableName",
classOf[IdentifierTableExtractor],
catalogDesc = Some(CatalogDesc()))
+ val uriDescs = Seq(
+ UriDesc("tableSpec", classOf[TableSpecURIExtractor]),
+ UriDesc("properties", classOf[PropertiesLocationUriExtractor]))
TableCommandSpec(
cmd,
Seq(
@@ -242,7 +250,8 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
tableDesc,
resolvedDbObjectNameDesc.copy(fieldName = "name")),
CREATETABLE_AS_SELECT,
- Seq(queryQueryDesc))
+ Seq(queryQueryDesc),
+ uriDescs = uriDescs)
}
val CommentOnTable = {
@@ -376,14 +385,21 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
val cmd = "org.apache.spark.sql.execution.datasources.CreateTable"
val tableDesc = TableDesc("tableDesc", classOf[CatalogTableTableExtractor])
val queryDesc = QueryDesc("query", "LogicalPlanOptionQueryExtractor")
-    TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE, queryDescs = Seq(queryDesc))
+ val uriDesc = UriDesc("tableDesc", classOf[CatalogTableURIExtractor])
+ TableCommandSpec(
+ cmd,
+ Seq(tableDesc),
+ CREATETABLE,
+ queryDescs = Seq(queryDesc),
+ uriDescs = Seq(uriDesc))
}
val CreateDataSourceTable = {
    val cmd = "org.apache.spark.sql.execution.command.CreateDataSourceTableCommand"
val tableDesc =
TableDesc("table", classOf[CatalogTableTableExtractor],
setCurrentDatabaseIfMissing = true)
- TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE)
+ val uriDesc = UriDesc("table", classOf[CatalogTableURIExtractor])
+ TableCommandSpec(cmd, Seq(tableDesc), CREATETABLE, uriDescs = Seq(uriDesc))
}
val CreateDataSourceTableAsSelect = {
@@ -399,8 +415,14 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
    val columnDesc = ColumnDesc("outputColumnNames", classOf[StringSeqColumnExtractor])
val tableDesc =
TableDesc("tableDesc", classOf[CatalogTableTableExtractor],
Some(columnDesc))
+ val uriDesc = UriDesc("tableDesc", classOf[CatalogTableURIExtractor])
val queryDesc = queryQueryDesc
-    TableCommandSpec(cmd, Seq(tableDesc), "CREATETABLE_AS_SELECT", queryDescs = Seq(queryDesc))
+ TableCommandSpec(
+ cmd,
+ Seq(tableDesc),
+ "CREATETABLE_AS_SELECT",
+ queryDescs = Seq(queryDesc),
+ uriDescs = Seq(uriDesc))
}
val CreateTableLike = {
@@ -414,7 +436,8 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
classOf[TableIdentifierTableExtractor],
isInput = true,
setCurrentDatabaseIfMissing = true)
- TableCommandSpec(cmd, Seq(tableDesc1, tableDesc2), CREATETABLE)
+    val uriDesc = UriDesc("fileFormat", classOf[CatalogStorageFormatURIExtractor])
+    TableCommandSpec(cmd, Seq(tableDesc1, tableDesc2), CREATETABLE, uriDescs = Seq(uriDesc))
}
val DescribeColumn = {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index eaa6a3fa2..571e206f4 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -1240,4 +1240,60 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
}
}
}
+
+ test("Table Command location privilege") {
+ val db1 = defaultDb
+ val table1 = "table1"
+ val table2 = "table2"
+ withSingleCallEnabled {
+ withTempDir { path =>
+        withCleanTmpResources(Seq((s"$db1.$table1", "table"), (s"$db1.$table2", "table"))) {
+ interceptContains[AccessControlException](doAs(
+ someone,
+ sql(
+ s"""
+ |CREATE TABLE IF NOT EXISTS $db1.$table1(id int, scope int)
+ |LOCATION '$path'""".stripMargin)))(
+ s"does not have [create] privilege on [$db1/$table1]")
+ doAs(
+ admin,
+ sql(
+ s"""
+ |CREATE TABLE IF NOT EXISTS $db1.$table1(id int, scope int)
+ |LOCATION '$path'""".stripMargin))
+ interceptContains[AccessControlException](
+ doAs(
+ someone,
+ sql(
+ s"""
+ |CREATE TABLE $db1.$table2
+ |LIKE $db1.$table1
+ |LOCATION '$path'
+ |""".stripMargin)))(
+ s"does not have [select] privilege on [$db1/$table1], " +
+ s"[create] privilege on [$db1/$table2], " +
+ s"[write] privilege on [[$path, $path/]]")
+ interceptContains[AccessControlException](
+ doAs(
+ someone,
+ sql(
+ s"""
+ |CREATE TABLE $db1.$table2
+ |LOCATION '$path'
+ |AS
+ |SELECT * FROM $db1.$table1
+ |""".stripMargin)))(
+ if (!isSparkV35OrGreater) {
+            s"does not have [select] privilege on [$db1/$table1/id,$db1/$table1/scope], " +
+              s"[create] privilege on [$db1/$table2/id,$db1/$table2/scope], " +
+ s"[write] privilege on [[$path, $path/]]"
+ } else {
+            s"does not have [select] privilege on [$db1/$table1/id,$db1/$table1/scope], " +
+              s"[create] privilege on [$db1/$table2/id,$db1/$table2/scope], " +
+ s"[write] privilege on [[file://$path, file://$path/]]"
+ })
+ }
+ }
+ }
+ }
}