This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new e08750a4c [KYUUBI #5757][AUTHZ] Support alter path-based table for Delta Lake in Authz
e08750a4c is described below

commit e08750a4cbd777085427ca9252d89315871f355a
Author: zml1206 <[email protected]>
AuthorDate: Tue Nov 28 15:51:15 2023 +0800

    [KYUUBI #5757][AUTHZ] Support alter path-based table for Delta Lake in Authz
    
    # :mag: Description
    ## Issue References 🔗
    
    This pull request fixes #5757
    
    ## Describe Your Solution 🔧
    
    Add `uriDescs` to the alter table command specs, so that the path of a path-based table is extracted and checked for the `write` privilege.
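    
    For illustration, a minimal sketch of the spec-generator change, mirroring the `TableCommands.scala` diff below (names come from this patch and assume the surrounding imports of the Authz `gen`/`serde` packages): a `UriDesc` names the plan field that carries the table identifier or path, plus the extractor that resolves it to a URI for the privilege check.
    
    ```scala
    val AddColumns = {
      val cmd = "org.apache.spark.sql.catalyst.plans.logical.AddColumns"
      // "child" is the resolved-table child of the AddColumns plan; the extractor
      // resolves it to the table's location URI so Authz can apply a write check.
      val uriDescs = Seq(UriDesc("child", classOf[ResolvedTableURIExtractor]))
      TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_ADDCOLS, uriDescs = uriDescs)
    }
    ```
    
    `RenameColumn` and `ReplaceColumns` reuse this spec via `AddColumns.copy(...)`, changing only the class name and operation type.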
    
    ## Types of changes :bookmark:
    
    - [ ] Bugfix (non-breaking change which fixes an issue)
    - [x] New feature (non-breaking change which adds functionality)
    - [ ] Breaking change (fix or feature that would cause existing functionality to change)
    
    ## Test Plan 🧪
    
    #### Behavior Without This Pull Request :coffin:
    
    Alter commands on a path-based Delta Lake table are authorized without a URI check, so a user who lacks the `write` privilege on the table path can still alter the table.
    
    #### Behavior With This Pull Request :tada:
    
    The table path is extracted from these commands and the `write` privilege on it is required.
    
    #### Related Unit Tests
    
    New tests in `DeltaCatalogRangerSparkExtensionSuite`: alter path-based table set properties, add columns, change column, drop column, rename column, and replace columns.
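    
    Each new test follows the same deny/allow pattern; a condensed sketch of the suite changes below:
    
    ```scala
    test("alter path-based table add columns") {
      withTempDir(path => {
        // admin can create the path-based table
        doAs(admin, sql(createPathBasedTableSql(path)))
        val addColumnsSql = s"ALTER TABLE delta.`$path` ADD COLUMNS (age int)"
        // someone lacks the write privilege on the path, so the command is denied
        interceptEndsWith[AccessControlException](
          doAs(someone, sql(addColumnsSql)))(
          s"does not have [write] privilege on [[$path, $path/]]")
        // admin holds the privilege, so the same statement succeeds
        doAs(admin, sql(addColumnsSql))
      })
    }
    ```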
    
    ---
    
    # Checklists
    ## ๐Ÿ“ Author Self Checklist
    
    - [x] My code follows the [style guidelines](https://kyuubi.readthedocs.io/en/master/contributing/code/style.html) of this project
    - [x] I have performed a self-review
    - [x] I have commented my code, particularly in hard-to-understand areas
    - [ ] I have made corresponding changes to the documentation
    - [x] My changes generate no new warnings
    - [x] I have added tests that prove my fix is effective or that my feature works
    - [ ] New and existing unit tests pass locally with my changes
    - [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)
    
    ## ๐Ÿ“ Committer Pre-Merge Checklist
    
    - [x] Pull request title is okay.
    - [x] No license issues.
    - [x] Milestone is correctly set.
    - [x] Test coverage is ok.
    - [x] Assignees are selected.
    - [x] Minimum number of approvals met.
    - [x] No changes are requested.
    
    **Be nice. Be informative.**
    
    Closes #5760 from zml1206/KYUUBI-5757.
    
    Closes #5757
    
    d5d0dc3cc [zml1206] Support alter path-based table for Delta Lake in Authz
    
    Authored-by: zml1206 <[email protected]>
    Signed-off-by: Kent Yao <[email protected]>
---
 .../src/main/resources/table_command_spec.json     |  42 +++++++--
 .../plugin/spark/authz/gen/TableCommands.scala     |  13 ++-
 .../DeltaCatalogRangerSparkExtensionSuite.scala    | 102 ++++++++++++++++++++-
 3 files changed, 143 insertions(+), 14 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
index 027f0a4e0..27b205309 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
+++ b/extensions/spark/kyuubi-spark-authz/src/main/resources/table_command_spec.json
@@ -13,7 +13,11 @@
   } ],
   "opType" : "ALTERTABLE_ADDCOLS",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "child",
+    "fieldExtractor" : "ResolvedTableURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.AddPartitions",
   "tableDescs" : [ {
@@ -45,7 +49,11 @@
   } ],
   "opType" : "ALTERTABLE_ADDCOLS",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "child",
+    "fieldExtractor" : "ResolvedTableURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.AlterTable",
   "tableDescs" : [ {
@@ -61,7 +69,11 @@
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "ident",
+    "fieldExtractor" : "IdentifierURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.AppendData",
   "tableDescs" : [ {
@@ -319,7 +331,11 @@
   } ],
   "opType" : "ALTERTABLE_ADDCOLS",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "child",
+    "fieldExtractor" : "ResolvedTableURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.DropPartitions",
   "tableDescs" : [ {
@@ -478,7 +494,11 @@
   } ],
   "opType" : "ALTERTABLE_RENAMECOL",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "child",
+    "fieldExtractor" : "ResolvedTableURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.RenamePartitions",
   "tableDescs" : [ {
@@ -526,7 +546,11 @@
   } ],
   "opType" : "ALTERTABLE_REPLACECOLS",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "child",
+    "fieldExtractor" : "ResolvedTableURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.ReplaceData",
   "tableDescs" : [ {
@@ -676,7 +700,11 @@
   } ],
   "opType" : "ALTERTABLE_PROPERTIES",
   "queryDescs" : [ ],
-  "uriDescs" : [ ]
+  "uriDescs" : [ {
+    "fieldName" : "table",
+    "fieldExtractor" : "ResolvedTableURIExtractor",
+    "isInput" : false
+  } ]
 }, {
   "classname" : "org.apache.spark.sql.catalyst.plans.logical.ShowCreateTable",
   "tableDescs" : [ {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
index ee0345a8c..aced937b9 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/gen/TableCommands.scala
@@ -39,7 +39,8 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
   val AlterTable = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.AlterTable"
     val tableDesc = TableDesc("ident", classOf[IdentifierTableExtractor])
-    TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES)
+    val uriDescs = Seq(UriDesc("ident", classOf[IdentifierURIExtractor]))
+    TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES, uriDescs = uriDescs)
   }
 
   val AlterTableAddColumns = {
@@ -51,7 +52,8 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
 
   val AddColumns = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.AddColumns"
-    TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_ADDCOLS)
+    val uriDescs = Seq(UriDesc("child", classOf[ResolvedTableURIExtractor]))
+    TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_ADDCOLS, uriDescs = uriDescs)
   }
 
   val AlterColumn = {
@@ -66,12 +68,12 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
 
   val ReplaceColumns = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.ReplaceColumns"
-    TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_REPLACECOLS)
+    AddColumns.copy(classname = cmd, opType = ALTERTABLE_REPLACECOLS)
   }
 
   val RenameColumn = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.RenameColumn"
-    TableCommandSpec(cmd, Seq(resolvedTableDesc), ALTERTABLE_RENAMECOL)
+    AddColumns.copy(classname = cmd, opType = ALTERTABLE_RENAMECOL)
   }
 
   val AlterTableAddPartition = {
@@ -638,7 +640,8 @@ object TableCommands extends CommandSpecs[TableCommandSpec] {
   val SetTableProperties = {
     val cmd = "org.apache.spark.sql.catalyst.plans.logical.SetTableProperties"
     val tableDesc = TableDesc("table", classOf[ResolvedTableTableExtractor])
-    TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES)
+    val uriDescs = Seq(UriDesc("table", classOf[ResolvedTableURIExtractor]))
+    TableCommandSpec(cmd, Seq(tableDesc), ALTERTABLE_PROPERTIES, uriDescs = uriDescs)
   }
 
   val AddArchivesCommand = {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
index c1dda6989..1ce8ad676 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
@@ -25,7 +25,7 @@ import org.apache.kyuubi.plugin.spark.authz.AccessControlException
 import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
 import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
 import org.apache.kyuubi.plugin.spark.authz.ranger.DeltaCatalogRangerSparkExtensionSuite._
-import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils.isSparkV32OrGreater
+import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils.{isSparkV32OrGreater, isSparkV35OrGreater}
 import org.apache.kyuubi.tags.DeltaTest
 import org.apache.kyuubi.util.AssertionUtils._
 
@@ -41,6 +41,14 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
   val table1 = "table1_delta"
   val table2 = "table2_delta"
 
+  def propString(props: Map[String, String]): String =
+    if (props.isEmpty) ""
+    else {
+      props
+        .map { case (key, value) => s"'$key' = '$value'" }
+        .mkString("TBLPROPERTIES (", ",", ")")
+    }
+
   def createTableSql(namespace: String, table: String): String =
     s"""
        |CREATE TABLE IF NOT EXISTS $namespace.$table (
@@ -53,7 +61,7 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
        |PARTITIONED BY (gender)
        |""".stripMargin
 
-  def createPathBasedTableSql(path: Path): String =
+  def createPathBasedTableSql(path: Path, props: Map[String, String] = Map.empty): String =
     s"""
        |CREATE TABLE IF NOT EXISTS delta.`$path` (
        |  id INT,
@@ -63,6 +71,7 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
        |)
        |USING DELTA
        |PARTITIONED BY (gender)
+       |${propString(props)}
        |""".stripMargin
 
   override def withFixture(test: NoArgTest): Outcome = {
@@ -468,6 +477,95 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
       doAs(admin, sql(vacuumTableSql2))
     })
   }
+
+  test("alter path-based table set properties") {
+    withTempDir(path => {
+      doAs(admin, sql(createPathBasedTableSql(path)))
+      val setPropertiesSql = s"ALTER TABLE delta.`$path`" +
+        s" SET TBLPROPERTIES ('delta.appendOnly' = 'true')"
+      interceptEndsWith[AccessControlException](
+        doAs(someone, sql(setPropertiesSql)))(
+        s"does not have [write] privilege on [[$path, $path/]]")
+      doAs(admin, sql(setPropertiesSql))
+    })
+  }
+
+  test("alter path-based table add columns") {
+    withTempDir(path => {
+      doAs(admin, sql(createPathBasedTableSql(path)))
+      val addColumnsSql = s"ALTER TABLE delta.`$path` ADD COLUMNS (age int)"
+      interceptEndsWith[AccessControlException](
+        doAs(someone, sql(addColumnsSql)))(
+        s"does not have [write] privilege on [[$path, $path/]]")
+      doAs(admin, sql(addColumnsSql))
+    })
+  }
+
+  test("alter path-based table change column") {
+    withTempDir(path => {
+      doAs(admin, sql(createPathBasedTableSql(path)))
+      val changeColumnSql = s"ALTER TABLE delta.`$path`" +
+        s" CHANGE COLUMN gender gender STRING AFTER birthDate"
+      interceptEndsWith[AccessControlException](
+        doAs(someone, sql(changeColumnSql)))(
+        s"does not have [write] privilege on [[$path, $path/]]")
+      doAs(admin, sql(changeColumnSql))
+    })
+  }
+
+  test("alter path-based table drop column") {
+    assume(
+      isSparkV32OrGreater,
+      "alter table drop column is available in Delta Lake 1.2.0 and above")
+
+    withTempDir(path => {
+      doAs(admin, sql(createPathBasedTableSql(path, Map("delta.columnMapping.mode" -> "name"))))
+      val dropColumnSql = s"ALTER TABLE delta.`$path` DROP COLUMN birthDate"
+      interceptEndsWith[AccessControlException](
+        doAs(someone, sql(dropColumnSql)))(
+        s"does not have [write] privilege on [[$path, $path/]]")
+      doAs(admin, sql(dropColumnSql))
+    })
+  }
+
+  test("alter path-based table rename column") {
+    assume(
+      isSparkV32OrGreater,
+      "alter table rename column is available in Delta Lake 1.2.0 and above")
+
+    withTempDir(path => {
+      doAs(admin, sql(createPathBasedTableSql(path, Map("delta.columnMapping.mode" -> "name"))))
+      val renameColumnSql = s"ALTER TABLE delta.`$path`" +
+        s" RENAME COLUMN birthDate TO dateOfBirth"
+      interceptEndsWith[AccessControlException](
+        doAs(someone, sql(renameColumnSql)))(
+        s"does not have [write] privilege on [[$path, $path/]]")
+      doAs(admin, sql(renameColumnSql))
+    })
+  }
+
+  test("alter path-based table replace columns") {
+    withTempDir(path => {
+      assume(
+        isSparkV32OrGreater,
+        "alter table replace columns is not available in Delta Lake 1.0.1")
+
+      doAs(admin, sql(createPathBasedTableSql(path, Map("delta.columnMapping.mode" -> "name"))))
+      val replaceColumnsSql = s"ALTER TABLE delta.`$path`" +
+        s" REPLACE COLUMNS (id INT, name STRING, gender STRING)"
+      interceptEndsWith[AccessControlException](
+        doAs(someone, sql(replaceColumnsSql)))(
+        s"does not have [write] privilege on [[$path, $path/]]")
+
+      // There was a bug before Delta Lake 3.0: it would throw an AnalysisException with message
+      // "Cannot drop column from a struct type with a single field:
+      // StructType(StructField(birthDate,TimestampType,true))".
+      // For details, see https://github.com/delta-io/delta/pull/1822
+      if (isSparkV35OrGreater) {
+        doAs(admin, sql(replaceColumnsSql))
+      }
+    })
+  }
 }
 
 object DeltaCatalogRangerSparkExtensionSuite {
