This is an automated email from the ASF dual-hosted git repository.

bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 3e8d80f3d [KYUUBI #5543] [AUTHZ] Generalize command specs and check 
duplicated class names of command spec
3e8d80f3d is described below

commit 3e8d80f3dd0bd4678c1e22e10f27afdf4b45488a
Author: liangbowen <[email protected]>
AuthorDate: Fri Oct 27 13:17:51 2023 +0800

    [KYUUBI #5543] [AUTHZ] Generalize command specs and check duplicated class 
names of command spec
    
    ### _Why are the changes needed?_
    
    - Generalizing command specs for the commands of tables (including 
lakehouse tables), functions, db, scans
    - Checking duplicated class names of command specs, to prevent breaking the 
serde design
    - Fix the output message for spec count written to file
    
    Error message when there are duplicated class names in command specs:
    <img width="651" alt="image" 
src="https://github.com/apache/kyuubi/assets/1935105/315aed81-066e-4cfb-a110-89504e0eac0f">
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including 
negative and positive cases if possible
    
    - [x] Add screenshots for manual tests if appropriate
    
    - [x] [Run 
test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests)
 locally before make a pull request
    
    ### _Was this patch authored or co-authored using generative AI tooling?_
    
    No.
    
    Closes #5543 from bowenliang123/authz-command-specs.
    
    Closes #5543
    
    85f1e1bf7 [liangbowen] nit
    1d0b8578c [liangbowen] command specs
    
    Authored-by: liangbowen <[email protected]>
    Signed-off-by: Bowen Liang <[email protected]>
---
 .../plugin/spark/authz/serde/CommandSpec.scala      |  4 ++++
 .../plugin/spark/authz/gen/DatabaseCommands.scala   |  4 ++--
 .../plugin/spark/authz/gen/FunctionCommands.scala   |  6 +++---
 .../plugin/spark/authz/gen/HudiCommands.scala       |  4 ++--
 .../plugin/spark/authz/gen/IcebergCommands.scala    |  4 ++--
 .../spark/authz/gen/JsonSpecFileGenerator.scala     | 21 +++++++++++++--------
 .../kyuubi/plugin/spark/authz/gen/Scans.scala       |  4 ++--
 .../plugin/spark/authz/gen/TableCommands.scala      |  4 ++--
 8 files changed, 30 insertions(+), 21 deletions(-)

diff --git 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/CommandSpec.scala
 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/CommandSpec.scala
index 32ad30e21..22bf07bfa 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/CommandSpec.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/CommandSpec.scala
@@ -43,6 +43,10 @@ trait CommandSpec extends {
   final def operationType: OperationType = OperationType.withName(opType)
 }
 
+trait CommandSpecs[T <: CommandSpec] {
+  def specs: Seq[T]
+}
+
 /**
  * A specification describe a database command
  *
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
index a61c142ed..4436d9566 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/DatabaseCommands.scala
@@ -20,7 +20,7 @@ package org.apache.kyuubi.plugin.spark.authz.gen
 import org.apache.kyuubi.plugin.spark.authz.OperationType._
 import org.apache.kyuubi.plugin.spark.authz.serde._
 
-object DatabaseCommands {
+object DatabaseCommands extends CommandSpecs[DatabaseCommandSpec] {
 
   val AlterDatabaseProperties = {
     DatabaseCommandSpec(
@@ -141,7 +141,7 @@ object DatabaseCommands {
     DatabaseCommandSpec(cmd, Seq(databaseDesc), DESCDATABASE)
   }
 
-  val data: Array[DatabaseCommandSpec] = Array(
+  override def specs: Seq[DatabaseCommandSpec] = Seq(
     AlterDatabaseProperties,
     AlterDatabaseProperties.copy(
       classname = 
"org.apache.spark.sql.execution.command.AlterDatabaseSetLocationCommand",
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/FunctionCommands.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/FunctionCommands.scala
index 1822e80fc..d5c849dd6 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/FunctionCommands.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/FunctionCommands.scala
@@ -21,7 +21,7 @@ import org.apache.kyuubi.plugin.spark.authz.OperationType._
 import org.apache.kyuubi.plugin.spark.authz.serde._
 import org.apache.kyuubi.plugin.spark.authz.serde.FunctionType.{SYSTEM, TEMP}
 
-object FunctionCommands {
+object FunctionCommands extends CommandSpecs[FunctionCommandSpec] {
 
   val CreateFunction = {
     val cmd = "org.apache.spark.sql.execution.command.CreateFunctionCommand"
@@ -83,9 +83,9 @@ object FunctionCommands {
     FunctionCommandSpec(cmd, Seq(functionDesc), RELOADFUNCTION)
   }
 
-  val data: Array[FunctionCommandSpec] = Array(
+  override def specs: Seq[FunctionCommandSpec] = Seq(
     CreateFunction,
     DropFunction,
     DescribeFunction,
-    RefreshFunction).sortBy(_.classname)
+    RefreshFunction)
 }
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
index 9b843b1f6..9d80ee0f4 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/HudiCommands.scala
@@ -22,7 +22,7 @@ import 
org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType._
 import org.apache.kyuubi.plugin.spark.authz.serde._
 import org.apache.kyuubi.plugin.spark.authz.serde.TableType._
 
-object HudiCommands {
+object HudiCommands extends CommandSpecs[TableCommandSpec] {
   val AlterHoodieTableAddColumnsCommand = {
     val cmd = 
"org.apache.spark.sql.hudi.command.AlterHoodieTableAddColumnsCommand"
     val columnDesc = ColumnDesc("colsToAdd", 
classOf[StructFieldSeqColumnExtractor])
@@ -242,7 +242,7 @@ object HudiCommands {
           setCurrentDatabaseIfMissing = true)))
   }
 
-  val data: Array[TableCommandSpec] = Array(
+  override def specs: Seq[TableCommandSpec] = Seq(
     AlterHoodieTableAddColumnsCommand,
     AlterHoodieTableChangeColumnCommand,
     AlterHoodieTableDropPartitionCommand,
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala
index fb195b455..59f8eb7a6 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/IcebergCommands.scala
@@ -21,7 +21,7 @@ import org.apache.kyuubi.plugin.spark.authz.OperationType
 import org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType._
 import org.apache.kyuubi.plugin.spark.authz.serde._
 
-object IcebergCommands {
+object IcebergCommands extends CommandSpecs[TableCommandSpec] {
 
   val DeleteFromIcebergTable = {
     val cmd = 
"org.apache.spark.sql.catalyst.plans.logical.DeleteFromIcebergTable"
@@ -56,7 +56,7 @@ object IcebergCommands {
     TableCommandSpec(cmd, Seq(td), opType = 
OperationType.ALTERTABLE_PROPERTIES)
   }
 
-  val data: Array[TableCommandSpec] = Array(
+  override def specs: Seq[TableCommandSpec] = Seq(
     CallProcedure,
     DeleteFromIcebergTable,
     UpdateIcebergTable,
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala
index 007850f68..07a8e2852 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/JsonSpecFileGenerator.scala
@@ -24,6 +24,7 @@ import java.nio.file.{Files, Paths, StandardOpenOption}
 import org.scalatest.funsuite.AnyFunSuite
 
 import org.apache.kyuubi.plugin.spark.authz.serde.{mapper, CommandSpec}
+import org.apache.kyuubi.plugin.spark.authz.serde.CommandSpecs
 import org.apache.kyuubi.util.AssertionUtils._
 
 /**
@@ -42,26 +43,30 @@ import org.apache.kyuubi.util.AssertionUtils._
 class JsonSpecFileGenerator extends AnyFunSuite {
   // scalastyle:on
   test("check spec json files") {
-    writeCommandSpecJson("database", Seq(DatabaseCommands.data))
-    writeCommandSpecJson("table", Seq(TableCommands.data, 
IcebergCommands.data, HudiCommands.data))
-    writeCommandSpecJson("function", Seq(FunctionCommands.data))
-    writeCommandSpecJson("scan", Seq(Scans.data))
+    writeCommandSpecJson("database", Seq(DatabaseCommands))
+    writeCommandSpecJson("table", Seq(TableCommands, IcebergCommands, 
HudiCommands))
+    writeCommandSpecJson("function", Seq(FunctionCommands))
+    writeCommandSpecJson("scan", Seq(Scans))
   }
 
   def writeCommandSpecJson[T <: CommandSpec](
       commandType: String,
-      specArr: Seq[Array[T]]): Unit = {
+      specsArr: Seq[CommandSpecs[T]]): Unit = {
     val pluginHome = 
getClass.getProtectionDomain.getCodeSource.getLocation.getPath
       .split("target").head
     val filename = s"${commandType}_command_spec.json"
     val filePath = Paths.get(pluginHome, "src", "main", "resources", filename)
 
-    val generatedStr = mapper.writerWithDefaultPrettyPrinter()
-      .writeValueAsString(specArr.flatMap(_.sortBy(_.classname)))
+    val allSpecs = specsArr.flatMap(_.specs.sortBy(_.classname))
+    val duplicatedClassnames = allSpecs.groupBy(_.classname).values
+      .filter(_.size > 1).flatMap(specs => specs.map(_.classname)).toSet
+    withClue(s"Unexpected duplicated classnames: $duplicatedClassnames")(
+      assertResult(0)(duplicatedClassnames.size))
+    val generatedStr = 
mapper.writerWithDefaultPrettyPrinter().writeValueAsString(allSpecs)
 
     if (sys.env.get("KYUUBI_UPDATE").contains("1")) {
       // scalastyle:off println
-      println(s"writing ${specArr.length} specs to $filename")
+      println(s"writing ${allSpecs.length} specs to $filename")
       // scalastyle:on println
       Files.write(
         filePath,
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/Scans.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/Scans.scala
index b2c1868a2..d61e2606d 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/Scans.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/Scans.scala
@@ -20,7 +20,7 @@ package org.apache.kyuubi.plugin.spark.authz.gen
 import org.apache.kyuubi.plugin.spark.authz.serde._
 import org.apache.kyuubi.plugin.spark.authz.serde.FunctionType._
 
-object Scans {
+object Scans extends CommandSpecs[ScanSpec] {
 
   val HiveTableRelation = {
     val r = "org.apache.spark.sql.catalyst.catalog.HiveTableRelation"
@@ -79,7 +79,7 @@ object Scans {
 
   val HiveGenericUDTF = HiveSimpleUDF.copy(classname = 
"org.apache.spark.sql.hive.HiveGenericUDTF")
 
-  val data: Array[ScanSpec] = Array(
+  override def specs: Seq[ScanSpec] = Seq(
     HiveTableRelation,
     LogicalRelation,
     DataSourceV2Relation,
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index 9893953af..3abf336cd 100644
--- 
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ 
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -22,7 +22,7 @@ import 
org.apache.kyuubi.plugin.spark.authz.PrivilegeObjectActionType._
 import org.apache.kyuubi.plugin.spark.authz.serde._
 import org.apache.kyuubi.plugin.spark.authz.serde.TableType._
 
-object TableCommands {
+object TableCommands extends CommandSpecs[TableCommandSpec] {
   // table extractors
   val tite = classOf[TableIdentifierTableExtractor]
   val tableNameDesc = TableDesc("tableName", tite)
@@ -594,7 +594,7 @@ object TableCommands {
     TableCommandSpec(cmd, Seq(tableIdentDesc.copy(isInput = true)))
   }
 
-  val data: Array[TableCommandSpec] = Array(
+  override def specs: Seq[TableCommandSpec] = Seq(
     AddPartitions,
     DropPartitions,
     RenamePartitions,

Reply via email to