This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 851178ce9a [KYUUBI #6940] Test Unset Table Properties Command
851178ce9a is described below

commit 851178ce9a50f67f0bdc04c4216bf098c72cbbce
Author: davidyuan <[email protected]>
AuthorDate: Wed Mar 5 13:37:39 2025 +0800

    [KYUUBI #6940] Test Unset Table Properties Command
    
    ### Why are the changes needed?
    
    Currently the Ranger privilege check misses the UnsetTableProperties command; we
    need to add it to the authz command spec.
    #6940
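
    For illustration, a minimal sketch of the kind of statement that resolves to the
    UnsetTableProperties logical plan and was previously not covered by the check
    (the catalog, database, and table names are hypothetical, and a Spark session
    with the Kyuubi authz extension and a Paimon DSv2 catalog is assumed):

        import org.apache.spark.sql.SparkSession

        object UnsetTablePropertiesExample {
          def main(args: Array[String]): Unit = {
            // Assumes spark.sql.catalog.paimon is configured as a Paimon (DSv2) catalog.
            val spark = SparkSession.builder().getOrCreate()
            // Against a DSv2 catalog this resolves to the UnsetTableProperties plan,
            // which the command spec now maps to the ALTERTABLE_PROPERTIES op type.
            spark.sql("ALTER TABLE paimon.db.tbl UNSET TBLPROPERTIES ('write-buffer-size')")
            spark.stop()
          }
        }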
    
    ### How was this patch tested?
    
    Tested by removing table properties from a Paimon table, which exercises this command.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #6944 from davidyuan1223/test_remove_table_properties.
    
    Closes #6940
    
    4f24d7d6a [davidyuan] Merge branch 'master' into test_remove_table_properties
    11d3773ed [davidyuan] test unset table properties command
    
    Authored-by: davidyuan <[email protected]>
    Signed-off-by: Kent Yao <[email protected]>
---
 .../src/main/resources/table_command_spec.json     | 16 +++++++++++++
 .../plugin/spark/authz/gen/TableCommands.scala     |  6 +++++
 .../PaimonCatalogRangerSparkExtensionSuite.scala   | 28 ++++++++++++++++++++++
 3 files changed, 50 insertions(+)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index d707a09201..fcbae2a4fd 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -776,6 +776,22 @@
   "opType" : "TRUNCATETABLE",
   "queryDescs" : [ ],
   "uriDescs" : [ ]
+}, {
+  "classname" : "org.apache.spark.sql.catalyst.plans.logical.UnsetTableProperties",
+  "tableDescs" : [ {
+    "fieldName" : "child",
+    "fieldExtractor" : "ResolvedTableTableExtractor",
+    "columnDesc" : null,
+    "actionTypeDesc" : null,
+    "tableTypeDesc" : null,
+    "catalogDesc" : null,
+    "isInput" : false,
+    "setCurrentDatabaseIfMissing" : false,
+    "comment" : ""
+  } ],
+  "opType" : "ALTERTABLE_PROPERTIES",
+  "queryDescs" : [ ],
+  "uriDescs" : [ ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.UpdateTable",
   "tableDescs" : [ {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index 1b88123c75..9f0d32ba93 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -167,6 +167,11 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
     Seq(tableNameDesc),
     ALTERTABLE_PROPERTIES)
 
+  val UnsetTableProperties = TableCommandSpec(
+    "org.apache.spark.sql.catalyst.plans.logical.UnsetTableProperties",
+    Seq(resolvedTableDesc),
+    ALTERTABLE_PROPERTIES)
+
   val AlterTableUnsetProperties = AlterTableSetProperties.copy(classname =
     "org.apache.spark.sql.execution.command.AlterTableUnsetPropertiesCommand")
 
@@ -688,6 +693,7 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
     AlterTableSetLocation,
     AlterTableSetProperties,
     AlterTableUnsetProperties,
+    UnsetTableProperties,
     AlterViewAs,
     AnalyzeColumn,
     AnalyzePartition,
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
index cadb8ff246..7fe69f4434 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
@@ -224,6 +224,34 @@ class PaimonCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
     }
   }
 
+  test("REMOVING TBLPROPERTIES") {
+    withCleanTmpResources(Seq(
+      (s"$catalogV2.$namespace1.$table1", "table"))) {
+      val createTableWithPropertiesSql =
+        s"""
+           |CREATE TABLE IF NOT EXISTS $catalogV2.$namespace1.$table1
+           |(id INT, name STRING)
+           | USING paimon
+           | TBLPROPERTIES (
+           |  'write-buffer-size' = '256 MB'
+           | )
+           | OPTIONS (
+           |  'primary-key' = 'id'
+           | )
+           |""".stripMargin
+      doAs(admin, sql(createTableWithPropertiesSql))
+      val removingTblpropertiesSql =
+        s"""
+           |ALTER TABLE $catalogV2.$namespace1.$table1 UNSET TBLPROPERTIES ('write-buffer-size')
+           |""".stripMargin
+
+      interceptEndsWith[AccessControlException] {
+        doAs(someone, sql(removingTblpropertiesSql))
+      }(s"does not have [alter] privilege on [$namespace1/$table1]")
+      doAs(admin, sql(removingTblpropertiesSql))
+    }
+  }
+
   def createTableSql(namespace: String, table: String): String =
     s"""
        |CREATE TABLE IF NOT EXISTS $catalogV2.$namespace.$table
