This is an automated email from the ASF dual-hosted git repository.

bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 5b9290a91 [KYUUBI #5537] [AuthZ] Order Command Spec by Command Class [group, classname]
5b9290a91 is described below

commit 5b9290a9141781f5c723722a76284cfb2028be0b
Author: Kent Yao <[email protected]>
AuthorDate: Thu Oct 26 21:46:54 2023 +0800

    [KYUUBI #5537] [AuthZ] Order Command Spec by Command Class [group, classname]
    
    ### _Why are the changes needed?_
    
    Keep vanilla Spark, Iceberg, and Hudi commands in separate groups to make code review easier
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
    
    ### _Was this patch authored or co-authored using generative AI tooling?_
    
    no
    
    Closes #5537 from yaooqinn/group.
    
    Closes #5537
    
    4f88b6405 [Kent Yao] [AuthZ] Order Command Spec by Command Class [group, classname]
    
    Authored-by: Kent Yao <[email protected]>
    Signed-off-by: liangbowen <[email protected]>
---
 .../src/main/resources/table_command_spec.json     | 184 ++++++++++-----------
 .../spark/authz/gen/JsonSpecFileGenerator.scala    |  12 +-
 2 files changed, 98 insertions(+), 98 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index ea6e27576..af86ffd1c 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -91,20 +91,6 @@
     "fieldName" : "plan",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
   } ]
-}, {
-  "classname" : "org.apache.spark.sql.catalyst.plans.logical.Call",
-  "tableDescs" : [ {
-    "fieldName" : "args",
-    "fieldExtractor" : "ExpressionSeqTableExtractor",
-    "columnDesc" : null,
-    "actionTypeDesc" : null,
-    "tableTypeDesc" : null,
-    "catalogDesc" : null,
-    "isInput" : false,
-    "setCurrentDatabaseIfMissing" : false
-  } ],
-  "opType" : "ALTERTABLE_PROPERTIES",
-  "queryDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.CommentOnTable",
   "tableDescs" : [ {
@@ -209,24 +195,6 @@
   } ],
   "opType" : "CREATETABLE",
   "queryDescs" : [ ]
-}, {
-  "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.DeleteFromIcebergTable",
-  "tableDescs" : [ {
-    "fieldName" : "table",
-    "fieldExtractor" : "DataSourceV2RelationTableExtractor",
-    "columnDesc" : null,
-    "actionTypeDesc" : {
-      "fieldName" : null,
-      "fieldExtractor" : null,
-      "actionType" : "UPDATE"
-    },
-    "tableTypeDesc" : null,
-    "catalogDesc" : null,
-    "isInput" : false,
-    "setCurrentDatabaseIfMissing" : false
-  } ],
-  "opType" : "QUERY",
-  "queryDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.DeleteFromTable",
   "tableDescs" : [ {
@@ -310,27 +278,6 @@
   } ],
   "opType" : "DROPTABLE",
   "queryDescs" : [ ]
-}, {
-  "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.MergeIntoIcebergTable",
-  "tableDescs" : [ {
-    "fieldName" : "targetTable",
-    "fieldExtractor" : "DataSourceV2RelationTableExtractor",
-    "columnDesc" : null,
-    "actionTypeDesc" : {
-      "fieldName" : null,
-      "fieldExtractor" : null,
-      "actionType" : "UPDATE"
-    },
-    "tableTypeDesc" : null,
-    "catalogDesc" : null,
-    "isInput" : false,
-    "setCurrentDatabaseIfMissing" : false
-  } ],
-  "opType" : "QUERY",
-  "queryDescs" : [ {
-    "fieldName" : "sourceTable",
-    "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.MergeIntoTable",
   "tableDescs" : [ {
@@ -614,45 +561,6 @@
   } ],
   "opType" : "TRUNCATETABLE",
   "queryDescs" : [ ]
-}, {
-  "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.UnresolvedMergeIntoIcebergTable",
-  "tableDescs" : [ {
-    "fieldName" : "targetTable",
-    "fieldExtractor" : "DataSourceV2RelationTableExtractor",
-    "columnDesc" : null,
-    "actionTypeDesc" : {
-      "fieldName" : null,
-      "fieldExtractor" : null,
-      "actionType" : "UPDATE"
-    },
-    "tableTypeDesc" : null,
-    "catalogDesc" : null,
-    "isInput" : false,
-    "setCurrentDatabaseIfMissing" : false
-  } ],
-  "opType" : "QUERY",
-  "queryDescs" : [ {
-    "fieldName" : "sourceTable",
-    "fieldExtractor" : "LogicalPlanQueryExtractor"
-  } ]
-}, {
-  "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.UpdateIcebergTable",
-  "tableDescs" : [ {
-    "fieldName" : "table",
-    "fieldExtractor" : "DataSourceV2RelationTableExtractor",
-    "columnDesc" : null,
-    "actionTypeDesc" : {
-      "fieldName" : null,
-      "fieldExtractor" : null,
-      "actionType" : "UPDATE"
-    },
-    "tableTypeDesc" : null,
-    "catalogDesc" : null,
-    "isInput" : false,
-    "setCurrentDatabaseIfMissing" : false
-  } ],
-  "opType" : "QUERY",
-  "queryDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.UpdateTable",
   "tableDescs" : [ {
@@ -1418,6 +1326,98 @@
     "fieldName" : "query",
     "fieldExtractor" : "LogicalPlanQueryExtractor"
   } ]
+}, {
+  "classname" : "org.apache.spark.sql.catalyst.plans.logical.Call",
+  "tableDescs" : [ {
+    "fieldName" : "args",
+    "fieldExtractor" : "ExpressionSeqTableExtractor",
+    "columnDesc" : null,
+    "actionTypeDesc" : null,
+    "tableTypeDesc" : null,
+    "catalogDesc" : null,
+    "isInput" : false,
+    "setCurrentDatabaseIfMissing" : false
+  } ],
+  "opType" : "ALTERTABLE_PROPERTIES",
+  "queryDescs" : [ ]
+}, {
+  "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.DeleteFromIcebergTable",
+  "tableDescs" : [ {
+    "fieldName" : "table",
+    "fieldExtractor" : "DataSourceV2RelationTableExtractor",
+    "columnDesc" : null,
+    "actionTypeDesc" : {
+      "fieldName" : null,
+      "fieldExtractor" : null,
+      "actionType" : "UPDATE"
+    },
+    "tableTypeDesc" : null,
+    "catalogDesc" : null,
+    "isInput" : false,
+    "setCurrentDatabaseIfMissing" : false
+  } ],
+  "opType" : "QUERY",
+  "queryDescs" : [ ]
+}, {
+  "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.MergeIntoIcebergTable",
+  "tableDescs" : [ {
+    "fieldName" : "targetTable",
+    "fieldExtractor" : "DataSourceV2RelationTableExtractor",
+    "columnDesc" : null,
+    "actionTypeDesc" : {
+      "fieldName" : null,
+      "fieldExtractor" : null,
+      "actionType" : "UPDATE"
+    },
+    "tableTypeDesc" : null,
+    "catalogDesc" : null,
+    "isInput" : false,
+    "setCurrentDatabaseIfMissing" : false
+  } ],
+  "opType" : "QUERY",
+  "queryDescs" : [ {
+    "fieldName" : "sourceTable",
+    "fieldExtractor" : "LogicalPlanQueryExtractor"
+  } ]
+}, {
+  "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.UnresolvedMergeIntoIcebergTable",
+  "tableDescs" : [ {
+    "fieldName" : "targetTable",
+    "fieldExtractor" : "DataSourceV2RelationTableExtractor",
+    "columnDesc" : null,
+    "actionTypeDesc" : {
+      "fieldName" : null,
+      "fieldExtractor" : null,
+      "actionType" : "UPDATE"
+    },
+    "tableTypeDesc" : null,
+    "catalogDesc" : null,
+    "isInput" : false,
+    "setCurrentDatabaseIfMissing" : false
+  } ],
+  "opType" : "QUERY",
+  "queryDescs" : [ {
+    "fieldName" : "sourceTable",
+    "fieldExtractor" : "LogicalPlanQueryExtractor"
+  } ]
+}, {
+  "classname" : 
"org.apache.spark.sql.catalyst.plans.logical.UpdateIcebergTable",
+  "tableDescs" : [ {
+    "fieldName" : "table",
+    "fieldExtractor" : "DataSourceV2RelationTableExtractor",
+    "columnDesc" : null,
+    "actionTypeDesc" : {
+      "fieldName" : null,
+      "fieldExtractor" : null,
+      "actionType" : "UPDATE"
+    },
+    "tableTypeDesc" : null,
+    "catalogDesc" : null,
+    "isInput" : false,
+    "setCurrentDatabaseIfMissing" : false
+  } ],
+  "opType" : "QUERY",
+  "queryDescs" : [ ]
 }, {
   "classname" : 
"org.apache.spark.sql.hudi.command.AlterHoodieTableAddColumnsCommand",
   "tableDescs" : [ {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala
index 1b2d330d1..007850f68 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala
@@ -42,22 +42,22 @@ import org.apache.kyuubi.util.AssertionUtils._
 class JsonSpecFileGenerator extends AnyFunSuite {
   // scalastyle:on
   test("check spec json files") {
-    writeCommandSpecJson("database", DatabaseCommands.data)
-    writeCommandSpecJson("table", TableCommands.data ++ IcebergCommands.data 
++ HudiCommands.data)
-    writeCommandSpecJson("function", FunctionCommands.data)
-    writeCommandSpecJson("scan", Scans.data)
+    writeCommandSpecJson("database", Seq(DatabaseCommands.data))
+    writeCommandSpecJson("table", Seq(TableCommands.data, 
IcebergCommands.data, HudiCommands.data))
+    writeCommandSpecJson("function", Seq(FunctionCommands.data))
+    writeCommandSpecJson("scan", Seq(Scans.data))
   }
 
   def writeCommandSpecJson[T <: CommandSpec](
       commandType: String,
-      specArr: Array[T]): Unit = {
+      specArr: Seq[Array[T]]): Unit = {
     val pluginHome = getClass.getProtectionDomain.getCodeSource.getLocation.getPath
       .split("target").head
     val filename = s"${commandType}_command_spec.json"
     val filePath = Paths.get(pluginHome, "src", "main", "resources", filename)
 
     val generatedStr = mapper.writerWithDefaultPrettyPrinter()
-      .writeValueAsString(specArr.sortBy(_.classname))
+      .writeValueAsString(specArr.flatMap(_.sortBy(_.classname)))
 
     if (sys.env.get("KYUUBI_UPDATE").contains("1")) {
       // scalastyle:off println
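
For reference, here is a minimal, runnable sketch of the ordering change above, using a hypothetical Spec stand-in for CommandSpec and made-up class names (illustrative only, not the actual command lists). Sorting each group by classname before concatenating keeps the vanilla Spark, Iceberg, and Hudi groups contiguous, whereas the previous global sortBy interleaved their entries.

object GroupedOrderingSketch extends App {
  // Spec is a stand-in for CommandSpec; only the classname field matters here.
  final case class Spec(classname: String)

  val tableCmds = Array(Spec("logical.UpdateTable"), Spec("logical.DeleteFromTable"))
  val icebergCmds = Array(Spec("logical.UpdateIcebergTable"), Spec("logical.Call"))

  // Old behavior: concatenate first, then one global sort by classname,
  // which interleaves Iceberg entries with the vanilla Spark ones.
  val oldOrder = (tableCmds ++ icebergCmds).sortBy(_.classname)

  // New behavior: sort within each group, then concatenate, so each group
  // stays contiguous in the generated JSON.
  val newOrder = Seq(tableCmds, icebergCmds).flatMap(_.sortBy(_.classname))

  println(oldOrder.map(_.classname).mkString(", "))
  println(newOrder.map(_.classname).mkString(", "))
}

With the generator change, groups appear in the order passed at the call site, i.e. TableCommands, then IcebergCommands, then HudiCommands.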
