This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch branch-1.7
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/branch-1.7 by this push:
new 99194cfcf [KYUUBI #4658] [Authz] [Bug] Fix InsertIntoHiveDirCommand classname so that we can extract the query in it during authorization.
99194cfcf is described below
commit 99194cfcf5eae9c2c43609c26f4b37110ae39b19
Author: Karsonnel <[email protected]>
AuthorDate: Tue Apr 4 13:46:01 2023 +0800
[KYUUBI #4658] [Authz] [Bug] Fix InsertIntoHiveDirCommand classname so that we can extract the query in it during authorization.
### _Why are the changes needed?_
To fix https://github.com/apache/kyuubi/issues/4658. The Authz command spec registered `InsertIntoHiveDirCommand` under `org.apache.spark.sql.execution.datasources` instead of `org.apache.spark.sql.hive.execution`, so the command never matched its spec entry and the query inside `INSERT OVERWRITE DIRECTORY` escaped the privilege check.
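A minimal sketch of the lookup the spec drives, using hypothetical names (`CommandSpec`, `ClassnameLookup`) rather than Kyuubi's real internals: `table_command_spec.json` is keyed by the command's fully qualified class name, so a wrong package name means no match, no query extractor, and no privilege check.

```scala
// Illustrative sketch only, not Kyuubi's actual API: CommandSpec and
// ClassnameLookup are hypothetical, but they model a spec keyed by the
// command's fully qualified class name.
case class CommandSpec(classname: String, opType: String, queryExtractor: String)

object ClassnameLookup {
  // After this fix the entry uses the package where Spark actually defines
  // the command: org.apache.spark.sql.hive.execution.
  private val specs: Map[String, CommandSpec] = Map(
    "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand" ->
      CommandSpec(
        "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand",
        opType = "QUERY",
        queryExtractor = "LogicalPlanQueryExtractor"))

  def lookup(commandClassName: String): Option[CommandSpec] = specs.get(commandClassName)

  def main(args: Array[String]): Unit = {
    // The old, wrong key never matches, so the SELECT inside
    // INSERT OVERWRITE DIRECTORY was silently skipped by authorization.
    assert(lookup("org.apache.spark.sql.execution.datasources.InsertIntoHiveDirCommand").isEmpty)
    assert(lookup("org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand").isDefined)
  }
}
```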
### _How was this patch tested?_
Add a unit test that runs an `InsertIntoHiveDirCommand` whose query reads from a table the user has no permission on (a quick local check of the command class is sketched below).
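For context (not part of the patch), a hedged sketch of how one might confirm locally which command class Spark produces for this statement; it assumes a local Hive-enabled SparkSession, and the table and output directory are placeholders.

```scala
import org.apache.spark.sql.SparkSession

object ShowInsertDirCommandClass {
  def main(args: Array[String]): Unit = {
    // Requires spark-hive on the classpath; local[1] keeps the check lightweight.
    val spark = SparkSession.builder()
      .master("local[1]")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("CREATE TABLE IF NOT EXISTS src (id INT, name STRING)")

    // INSERT OVERWRITE DIRECTORY with Hive ROW FORMAT syntax, mirroring the new test.
    val analyzed = spark.sql(
      """INSERT OVERWRITE DIRECTORY '/tmp/test_dir'
        |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
        |SELECT * FROM src""".stripMargin).queryExecution.analyzed

    // Expected to print the Hive execution package, i.e.
    // org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand,
    // not org.apache.spark.sql.execution.datasources.*
    println(analyzed.getClass.getName)

    spark.stop()
  }
}
```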
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [x] [Run test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests) locally before making a pull request
Closes #4660 from Karsonnel/4658-authz-insert.
Closes #4658
1dfb60ea4 [Karsonnel] fix style
8063ec067 [Karsonnel] Update PrivilegesBuilderSuite.scala
4c6c8e1e2 [Karsonnel] add a test in privilegeBuilderSuite
5c652d3df [root] fix InsertIntoHiveDirCommand classname
Lead-authored-by: Karsonnel <[email protected]>
Co-authored-by: root <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
(cherry picked from commit 7a83901ea24a8035fbbed8de1e20b05712becfef)
Signed-off-by: Cheng Pan <[email protected]>
---
.../src/main/resources/table_command_spec.json | 2 +-
.../spark/authz/PrivilegesBuilderSuite.scala | 28 +++++++++++++++++++++-
.../plugin/spark/authz/gen/TableCommands.scala | 2 +-
.../authz/ranger/RangerSparkExtensionSuite.scala | 17 +++++++++++++
4 files changed, 46 insertions(+), 3 deletions(-)
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index f1c2297b3..81ccd8da0 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -1244,7 +1244,7 @@
"fieldExtractor" : "LogicalPlanQueryExtractor"
} ]
}, {
- "classname" :
"org.apache.spark.sql.execution.datasources.InsertIntoHiveDirCommand",
+ "classname" : "org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand",
"tableDescs" : [ ],
"opType" : "QUERY",
"queryDescs" : [ {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index 439290917..e9483eb34 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -1546,7 +1546,7 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
}
}
- test("InsertIntoHiveDirCommand") {
+ test("InsertIntoDataSourceDirCommand") {
assume(!isSparkV2)
val tableDirectory = getClass.getResource("/").getPath + "table_directory"
val directory = File(tableDirectory).createDirectory()
@@ -1572,6 +1572,32 @@ class HiveCatalogPrivilegeBuilderSuite extends PrivilegesBuilderSuite {
assert(out.isEmpty)
}
+ test("InsertIntoHiveDirCommand") {
+ assume(!isSparkV2)
+ val tableDirectory = getClass.getResource("/").getPath + "table_directory"
+ val directory = File(tableDirectory).createDirectory()
+ val plan = sql(
+ s"""
+ |INSERT OVERWRITE DIRECTORY '$directory.path'
+ |ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
+ |SELECT * FROM $reusedPartTable""".stripMargin)
+ .queryExecution.analyzed
+ val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
+ assert(operationType === QUERY)
+ assert(in.size === 1)
+ val po0 = in.head
+ assert(po0.actionType === PrivilegeObjectActionType.OTHER)
+ assert(po0.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
+ assert(po0.dbname equalsIgnoreCase reusedDb)
+ assert(po0.objectName equalsIgnoreCase reusedPartTable.split("\\.").last)
+ assert(po0.columns === Seq("key", "value", "pid"))
+ checkTableOwner(po0)
+ val accessType0 = ranger.AccessType(po0, operationType, isInput = true)
+ assert(accessType0 === AccessType.SELECT)
+
+ assert(out.isEmpty)
+ }
+
test("InsertIntoHiveTableCommand") {
assume(!isSparkV2)
val tableName = "InsertIntoHiveTable"
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index a8b8121e2..7bf01b43f 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -637,7 +637,7 @@ object TableCommands {
"org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand"),
InsertIntoHadoopFsRelationCommand,
InsertIntoDataSourceDir.copy(classname =
- "org.apache.spark.sql.execution.datasources.InsertIntoHiveDirCommand"),
+ "org.apache.spark.sql.execution.datasources.InsertIntoDataSourceDirCommand"),
InsertIntoHiveTable,
LoadData,
MergeIntoTable,
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index 6479af3c7..4f4d4a618 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -784,4 +784,21 @@ class HiveCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
sql(s"SHOW TABLES IN $db").queryExecution.optimizedPlan.stats
}
}
+
+ test("[KYUUBI #4658] INSERT OVERWRITE DIRECTORY did check query permission")
{
+ val db1 = "default"
+ val table = "src"
+
+ withCleanTmpResources(Seq((s"$db1.$table", "table"))) {
+ doAs("bob", sql(s"CREATE TABLE IF NOT EXISTS $db1.$table (id int, name
string)"))
+ val e1 = intercept[AccessControlException](
+ doAs(
+ "someone",
+ sql(
+ s"""INSERT OVERWRITE DIRECTORY '/tmp/test_dir' ROW FORMAT
DELIMITED FIELDS
+ | TERMINATED BY ','
+ | SELECT * FROM $db1.$table;""".stripMargin)))
+ assert(e1.getMessage.contains(s"does not have [select] privilege on [$db1/$table/id"))
+ }
+ }
}