This is an automated email from the ASF dual-hosted git repository.

bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 0e8570264 [KYUUBI #5532][AUTHZ] Support ALTER TABLE commands that update table schema for Delta Lake
0e8570264 is described below

commit 0e85702648404310acf7e2f0703138e2b9e12925
Author: zml1206 <[email protected]>
AuthorDate: Mon Oct 30 10:33:10 2023 +0800

    [KYUUBI #5532][AUTHZ] Support ALTER TABLE commands that update table schema for Delta Lake
    
    ### _Why are the changes needed?_
    To close #5532.
    Support ALTER TABLE commands for Delta Lake in Authz.
    https://docs.delta.io/latest/delta-batch.html#explicitly-update-schema
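    
    For reference, a minimal sketch of the schema-updating statements now guarded by Authz, mirroring the cases exercised in the new test suite below (the table name `t` is illustrative):
    ```scala
    // Each statement should now require the [alter] privilege on the table;
    // without it, the Ranger Authz plugin throws an AccessControlException.
    spark.sql("ALTER TABLE t ADD COLUMNS (age INT)")
    spark.sql("ALTER TABLE t CHANGE COLUMN gender gender STRING AFTER birthDate")
    spark.sql("ALTER TABLE t REPLACE COLUMNS (id INT, firstName STRING)")
    spark.sql("ALTER TABLE t RENAME COLUMN birthDate TO dateOfBirth")
    spark.sql("ALTER TABLE t DROP COLUMN birthDate")
    // (Note: Delta Lake may require column mapping to be enabled for
    // RENAME COLUMN and DROP COLUMN.)
    ```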
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly, including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
    
    ### _Was this patch authored or co-authored using generative AI tooling?_
    No.
    
    Closes #5552 from zml1206/KYUUBI-5532.
    
    Closes #5532
    
    e0c384d55 [zml1206] Delta Lake does not support purging tables (see the sketch after this list)
    c8cef4cf2 [zml1206] Support ALTER TABLE commands for Delta Lake
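    
    As noted in the first commit above, a minimal sketch of the related test-helper change in SparkSessionProvider.scala (copied from the diff below), which marks the Delta catalog as not supporting DROP TABLE ... PURGE:
    ```scala
    // Catalogs in this set are treated as not supporting PURGE during
    // test resource cleanup; the Delta catalog now joins the JDBC V2 catalog.
    private def isCatalogSupportPurge(catalogName: String): Boolean = {
      val unsupportedCatalogs = Set(v2JdbcTableCatalogClassName, deltaCatalogClassName)
      spark.conf.getOption(s"spark.sql.catalog.$catalogName") match {
        case Some(catalog) if !unsupportedCatalogs.contains(catalog) => true
        case _ => false
      }
    }
    ```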
    
    Authored-by: zml1206 <[email protected]>
    Signed-off-by: Bowen Liang <[email protected]>
---
 .../plugin/spark/authz/SparkSessionProvider.scala  |  3 +-
 .../DeltaCatalogRangerSparkExtensionSuite.scala    | 70 ++++++++++++++++++++--
 2 files changed, 67 insertions(+), 6 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
index c7e541ef5..7aa4d99e4 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
@@ -28,6 +28,7 @@ import org.scalatest.Assertions._
 import org.apache.kyuubi.Utils
 import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
 import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
+import org.apache.kyuubi.plugin.spark.authz.ranger.DeltaCatalogRangerSparkExtensionSuite._
 import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
 
 trait SparkSessionProvider {
@@ -106,7 +107,7 @@ trait SparkSessionProvider {
   }
 
   private def isCatalogSupportPurge(catalogName: String): Boolean = {
-    val unsupportedCatalogs = Set(v2JdbcTableCatalogClassName)
+    val unsupportedCatalogs = Set(v2JdbcTableCatalogClassName, deltaCatalogClassName)
     spark.conf.getOption(s"spark.sql.catalog.$catalogName") match {
       case Some(catalog) if !unsupportedCatalogs.contains(catalog) => true
       case _ => false
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
index 48bb5c879..59ced3eb1 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
@@ -22,6 +22,7 @@ import org.apache.kyuubi.Utils
 import org.apache.kyuubi.plugin.spark.authz.AccessControlException
 import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
 import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
+import org.apache.kyuubi.plugin.spark.authz.ranger.DeltaCatalogRangerSparkExtensionSuite._
 import org.apache.kyuubi.tags.DeltaTest
 import org.apache.kyuubi.util.AssertionUtils._
 
@@ -31,6 +32,7 @@ import org.apache.kyuubi.util.AssertionUtils._
 @DeltaTest
 class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
   override protected val catalogImpl: String = "hive"
+  override protected val sqlExtensions: String = "io.delta.sql.DeltaSparkSessionExtension"
 
   val namespace1 = deltaNamespace
   val table1 = "table1_delta"
@@ -41,9 +43,7 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
   }
 
   override def beforeAll(): Unit = {
-    spark.conf.set(
-      s"spark.sql.catalog.$sparkCatalog",
-      "org.apache.spark.sql.delta.catalog.DeltaCatalog")
+    spark.conf.set(s"spark.sql.catalog.$sparkCatalog", deltaCatalogClassName)
     spark.conf.set(
       s"spark.sql.catalog.$sparkCatalog.warehouse",
       Utils.createTempDir("delta-hadoop").toString)
@@ -103,8 +103,7 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
   }
 
   test("create or replace table") {
-    withCleanTmpResources(
-      Seq((s"$namespace1.$table1", "table"), (s"$namespace1", "database"))) {
+    withCleanTmpResources(Seq((s"$namespace1.$table1", "table"), (s"$namespace1", "database"))) {
       val createOrReplaceTableSql =
         s"""
            |CREATE OR REPLACE TABLE $namespace1.$table1 (
@@ -124,4 +123,65 @@ class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
       doAs(admin, createOrReplaceTableSql)
     }
   }
+
+  test("alter table") {
+    withCleanTmpResources(Seq((s"$namespace1.$table1", "table"), (s"$namespace1", "database"))) {
+      doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $namespace1"))
+      doAs(
+        admin,
+        sql(
+          s"""
+             |CREATE TABLE IF NOT EXISTS $namespace1.$table1 (
+             |  id INT,
+             |  firstName STRING,
+             |  middleName STRING,
+             |  lastName STRING,
+             |  gender STRING,
+             |  birthDate TIMESTAMP,
+             |  ssn STRING,
+             |  salary INT
+             |)
+             |USING DELTA
+             |PARTITIONED BY (gender)
+             |""".stripMargin))
+
+      // add columns
+      interceptContains[AccessControlException](
+        doAs(someone, sql(s"ALTER TABLE $namespace1.$table1 ADD COLUMNS (age int)")))(
+        s"does not have [alter] privilege on [$namespace1/$table1]")
+
+      // change column
+      interceptContains[AccessControlException](
+        doAs(
+          someone,
+          sql(s"ALTER TABLE $namespace1.$table1" +
+            s" CHANGE COLUMN gender gender STRING AFTER birthDate")))(
+        s"does not have [alter] privilege on [$namespace1/$table1]")
+
+      // replace columns
+      interceptContains[AccessControlException](
+        doAs(
+          someone,
+          sql(s"ALTER TABLE $namespace1.$table1" +
+            s" REPLACE COLUMNS (id INT, firstName STRING)")))(
+        s"does not have [alter] privilege on [$namespace1/$table1]")
+
+      // rename column
+      interceptContains[AccessControlException](
+        doAs(
+          someone,
+          sql(s"ALTER TABLE $namespace1.$table1" +
+            s" RENAME COLUMN birthDate TO dateOfBirth")))(
+        s"does not have [alter] privilege on [$namespace1/$table1]")
+
+      // drop column
+      interceptContains[AccessControlException](
+        doAs(someone, sql(s"ALTER TABLE $namespace1.$table1 DROP COLUMN birthDate")))(
+        s"does not have [alter] privilege on [$namespace1/$table1]")
+    }
+  }
+}
+
+object DeltaCatalogRangerSparkExtensionSuite {
+  val deltaCatalogClassName: String = "org.apache.spark.sql.delta.catalog.DeltaCatalog"
 }
