This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch branch-1.8
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/branch-1.8 by this push:
     new 71a47f38b [KYUUBI #5271][Bug] AnalyzeTableCommand should also add table write privilege
71a47f38b is described below

commit 71a47f38b549502cb17ba8c89c94c9fb52958b5b
Author: Angerszhuuuu <[email protected]>
AuthorDate: Wed Sep 13 19:49:57 2023 +0800

    [KYUUBI #5271][Bug] AnalyzeTableCommand should also add table write privilege
    
    ### _Why are the changes needed?_
    Since AnalyzeTableCommand also updates the table's metadata, and alter commands already require table write privilege, AnalyzeTableCommand should require it as well.
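    For illustration, a minimal sketch of the behavior change, mirroring the test pattern in PrivilegesBuilderSuite below (the table name `default.src` is hypothetical):
    
    ```scala
    // Sketch assuming the PrivilegesBuilder API shown in the diff below;
    // `default.src` is a hypothetical table name.
    val plan = sql("ANALYZE TABLE default.src COMPUTE STATISTICS")
      .queryExecution.analyzed
    val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
    // Before this change: operationType was ANALYZE_TABLE and `out` was empty,
    // so only read (SELECT) access was checked on the analyzed table.
    // After this change: the command maps to ALTERTABLE_PROPERTIES and the
    // table also appears as an output object, mapping to AccessType.ALTER.
    assert(operationType === ALTERTABLE_PROPERTIES)
    assert(out.size === 1)
    ```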
    
    ### _How was this patch tested?_
    - [x] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
    
    ### _Was this patch authored or co-authored using generative AI tooling?_
    
    Closes #5272 from AngersZhuuuu/KYUUBI-5271.
    
    Closes #5271
    
    ad5c70403 [Angerszhuuuu] Merge branch 'KYUUBI-5271' of https://github.com/AngersZhuuuu/incubator-kyuubi into KYUUBI-5271
    a5932b7e8 [Angerszhuuuu] Update TableCommands.scala
    97ee3299c [Angerszhuuuu] Merge branch 'master' into KYUUBI-5271
    cdd8100fd [Angerszhuuuu] Update PrivilegesBuilderSuite.scala
    5f110563e [Angerszhuuuu] update
    92c30454b [Angerszhuuuu] Revert "Update TableCommands.scala"
    504ff2a26 [Angerszhuuuu] Update TableCommands.scala
    6c4233680 [Angerszhuuuu] [KYUUBI #5271][Bug] AnalyzeTableCommand should also add table write privilege
    
    Authored-by: Angerszhuuuu <[email protected]>
    Signed-off-by: Kent Yao <[email protected]>
    (cherry picked from commit b21ae88545be0d9dce450b7bf0668e8ea61a48ef)
    Signed-off-by: Kent Yao <[email protected]>
---
 .../src/main/resources/table_command_spec.json     | 33 +++++++++++++--
 .../spark/authz/PrivilegesBuilderSuite.scala       | 49 ++++++++++++++++++----
 .../plugin/spark/authz/gen/TableCommands.scala     | 13 +++---
 3 files changed, 78 insertions(+), 17 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index 0025fae75..de0e03cac 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -851,6 +851,15 @@
 }, {
   "classname" : "org.apache.spark.sql.execution.command.AnalyzeColumnCommand",
   "tableDescs" : [ {
+    "fieldName" : "tableIdent",
+    "fieldExtractor" : "TableIdentifierTableExtractor",
+    "columnDesc" : null,
+    "actionTypeDesc" : null,
+    "tableTypeDesc" : null,
+    "catalogDesc" : null,
+    "isInput" : false,
+    "setCurrentDatabaseIfMissing" : false
+  }, {
     "fieldName" : "tableIdent",
     "fieldExtractor" : "TableIdentifierTableExtractor",
     "columnDesc" : {
@@ -875,11 +884,20 @@
     "isInput" : true,
     "setCurrentDatabaseIfMissing" : false
   } ],
-  "opType" : "ANALYZE_TABLE",
+  "opType" : "ALTERTABLE_PROPERTIES",
   "queryDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.execution.command.AnalyzePartitionCommand",
   "tableDescs" : [ {
+    "fieldName" : "tableIdent",
+    "fieldExtractor" : "TableIdentifierTableExtractor",
+    "columnDesc" : null,
+    "actionTypeDesc" : null,
+    "tableTypeDesc" : null,
+    "catalogDesc" : null,
+    "isInput" : false,
+    "setCurrentDatabaseIfMissing" : false
+  }, {
     "fieldName" : "tableIdent",
     "fieldExtractor" : "TableIdentifierTableExtractor",
     "columnDesc" : {
@@ -892,11 +910,20 @@
     "isInput" : true,
     "setCurrentDatabaseIfMissing" : false
   } ],
-  "opType" : "ANALYZE_TABLE",
+  "opType" : "ALTERTABLE_PROPERTIES",
   "queryDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.AnalyzeTableCommand",
   "tableDescs" : [ {
+    "fieldName" : "tableIdent",
+    "fieldExtractor" : "TableIdentifierTableExtractor",
+    "columnDesc" : null,
+    "actionTypeDesc" : null,
+    "tableTypeDesc" : null,
+    "catalogDesc" : null,
+    "isInput" : false,
+    "setCurrentDatabaseIfMissing" : false
+  }, {
     "fieldName" : "tableIdent",
     "fieldExtractor" : "TableIdentifierTableExtractor",
     "columnDesc" : null,
@@ -906,7 +933,7 @@
     "isInput" : true,
     "setCurrentDatabaseIfMissing" : false
   } ],
-  "opType" : "ANALYZE_TABLE",
+  "opType" : "ALTERTABLE_PROPERTIES",
   "queryDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.execution.command.CacheTableCommand",
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index 878aa7dad..723fabd7b 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -379,7 +379,7 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
     val plan = sql(s"ANALYZE TABLE $reusedPartTable PARTITION (pid=1)" +
       s" COMPUTE STATISTICS FOR COLUMNS key").queryExecution.analyzed
     val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
-    assert(operationType === ANALYZE_TABLE)
+    assert(operationType === ALTERTABLE_PROPERTIES)
     assert(in.size === 1)
     val po0 = in.head
     assert(po0.actionType === PrivilegeObjectActionType.OTHER)
@@ -390,16 +390,27 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
     assert(po0.columns === Seq("key"))
     checkTableOwner(po0)
     val accessType0 = ranger.AccessType(po0, operationType, isInput = true)
-    assert(accessType0 === AccessType.SELECT)
+    assert(accessType0 === AccessType.ALTER)
+
+    assert(out.size === 1)
+    val po1 = out.head
+    assert(po1.actionType === PrivilegeObjectActionType.OTHER)
+    assert(po1.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
+    assertEqualsIgnoreCase(reusedDb)(po1.dbname)
+    assertEqualsIgnoreCase(reusedPartTableShort)(po1.objectName)
+    // ignore this check as it behaves differently across spark versions
+    assert(po1.columns.isEmpty)
+    checkTableOwner(po1)
+    val accessType1 = ranger.AccessType(po1, operationType, isInput = true)
+    assert(accessType1 === AccessType.ALTER)
 
-    assert(out.size === 0)
   }
 
   test("AnalyzePartitionCommand") {
     val plan = sql(s"ANALYZE TABLE $reusedPartTable" +
       s" PARTITION (pid = 1) COMPUTE STATISTICS").queryExecution.analyzed
     val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
-    assert(operationType === ANALYZE_TABLE)
+    assert(operationType === ALTERTABLE_PROPERTIES)
 
     assert(in.size === 1)
     val po0 = in.head
@@ -411,9 +422,19 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
     assert(po0.columns === Seq("pid"))
     checkTableOwner(po0)
     val accessType0 = ranger.AccessType(po0, operationType, isInput = true)
-    assert(accessType0 === AccessType.SELECT)
+    assert(accessType0 === AccessType.ALTER)
 
-    assert(out.size === 0)
+    assert(out.size === 1)
+    val po1 = out.head
+    assert(po1.actionType === PrivilegeObjectActionType.OTHER)
+    assert(po1.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
+    assertEqualsIgnoreCase(reusedDb)(po1.dbname)
+    assertEqualsIgnoreCase(reusedPartTableShort)(po1.objectName)
+    // ignore this check as it behaves differently across spark versions
+    assert(po1.columns.isEmpty)
+    checkTableOwner(po1)
+    val accessType1 = ranger.AccessType(po1, operationType, isInput = true)
+    assert(accessType1 === AccessType.ALTER)
   }
 
   test("AnalyzeTableCommand") {
@@ -421,7 +442,7 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
       .queryExecution.analyzed
     val (in, out, operationType) = PrivilegesBuilder.build(plan, spark)
 
-    assert(operationType === ANALYZE_TABLE)
+    assert(operationType === ALTERTABLE_PROPERTIES)
     assert(in.size === 1)
     val po0 = in.head
     assert(po0.actionType === PrivilegeObjectActionType.OTHER)
@@ -432,9 +453,19 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
     assert(po0.columns.isEmpty)
     checkTableOwner(po0)
     val accessType0 = ranger.AccessType(po0, operationType, isInput = true)
-    assert(accessType0 === AccessType.SELECT)
+    assert(accessType0 === AccessType.ALTER)
 
-    assert(out.size === 0)
+    assert(out.size === 1)
+    val po1 = out.head
+    assert(po1.actionType === PrivilegeObjectActionType.OTHER)
+    assert(po1.privilegeObjectType === PrivilegeObjectType.TABLE_OR_VIEW)
+    assertEqualsIgnoreCase(reusedDb)(po1.dbname)
+    assertEqualsIgnoreCase(reusedPartTableShort)(po1.objectName)
+    // ignore this check as it behaves differently across spark versions
+    assert(po1.columns.isEmpty)
+    checkTableOwner(po1)
+    val accessType1 = ranger.AccessType(po1, operationType, isInput = true)
+    assert(accessType1 === AccessType.ALTER)
   }
 
   test("AnalyzeTablesCommand") {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index f3754e4b9..ca2ee9294 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -181,7 +181,8 @@ object TableCommands {
     val cd2 = cd1.copy(fieldExtractor = classOf[StringSeqOptionColumnExtractor])
     val td1 = tableIdentDesc.copy(columnDesc = Some(cd1), isInput = true)
     val td2 = td1.copy(columnDesc = Some(cd2))
-    TableCommandSpec(cmd, Seq(td1, td2), ANALYZE_TABLE)
+    // AnalyzeColumn will update table properties, here we use ALTERTABLE_PROPERTIES
+    TableCommandSpec(cmd, Seq(tableIdentDesc, td1, td2), ALTERTABLE_PROPERTIES)
   }
 
   val AnalyzePartition = {
@@ -189,16 +190,18 @@ object TableCommands {
     val columnDesc = ColumnDesc("partitionSpec", classOf[PartitionColumnExtractor])
     TableCommandSpec(
       cmd,
-      Seq(tableIdentDesc.copy(columnDesc = Some(columnDesc), isInput = true)),
-      ANALYZE_TABLE)
+      // AnalyzePartition will update table properties, here we use ALTERTABLE_PROPERTIES
+      Seq(tableIdentDesc, tableIdentDesc.copy(columnDesc = Some(columnDesc), isInput = true)),
+      ALTERTABLE_PROPERTIES)
   }
 
   val AnalyzeTable = {
     val cmd = "org.apache.spark.sql.execution.command.AnalyzeTableCommand"
     TableCommandSpec(
       cmd,
-      Seq(tableIdentDesc.copy(isInput = true)),
-      ANALYZE_TABLE)
+      // AnalyzeTable will update table properties, here we use ALTERTABLE_PROPERTIES
+      Seq(tableIdentDesc, tableIdentDesc.copy(isInput = true)),
+      ALTERTABLE_PROPERTIES)
   }
 
   val CreateTableV2 = {
