This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 31bbb536f2 [KYUUBI #7100] [#7099] Ranger Support Check Iceberg Alter Table Command & Change Iceberg Test Use Jdbc Catalog
31bbb536f2 is described below

commit 31bbb536f295e4c544e8a53098e3dcf85472d5f6
Author: davidyuan <yuanfuy...@mafengwo.com>
AuthorDate: Thu Jun 26 10:11:43 2025 +0800

    [KYUUBI #7100] [#7099] Ranger Support Check Iceberg Alter Table Command & Change Iceberg Test Use Jdbc Catalog
    
    Parent issue: #7040
    
    ### Why are the changes needed?
    
    To add Ranger access checks for the following Iceberg ALTER TABLE commands, and to switch the Iceberg tests to a JDBC catalog (see the sketch after this list):
    
    - [x] Alter Table Rename To
    - [x] Alter Table Set Properties
    - [x] Alter Table Unset Properties
    - [x] Alter Table Add Column
    - [x] Alter Table Rename Column
    - [x] Alter Table Alter Column
    - [x] Alter Table Drop Column
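    
    A minimal sketch of the statements now routed through the authz check, written in the suite's own style (doAs, sql, someone and intercept come from the test harness; catalog and table names are illustrative):
    
        // Sketch only: each ALTER TABLE variant below is now intercepted by
        // the Ranger plugin; an unprivileged user should get an
        // AccessControlException (names are illustrative).
        Seq(
          "ALTER TABLE jdbc_catalog.db.t RENAME TO db.t2",
          "ALTER TABLE jdbc_catalog.db.t SET TBLPROPERTIES ('k' = 'v')",
          "ALTER TABLE jdbc_catalog.db.t UNSET TBLPROPERTIES ('k')",
          "ALTER TABLE jdbc_catalog.db.t ADD COLUMN c int",
          "ALTER TABLE jdbc_catalog.db.t RENAME COLUMN c TO c2",
          "ALTER TABLE jdbc_catalog.db.t ALTER COLUMN c2 TYPE bigint",
          "ALTER TABLE jdbc_catalog.db.t DROP COLUMN c2"
        ).foreach { stmt =>
          intercept[AccessControlException](doAs(someone, sql(stmt)))
        }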
    
    ### How was this patch tested?
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    Closes #7100 from davidyuan1223/iceberg_alter_table_check.
    
    Closes #7100
    
    4be2210f1 [davidyuan] update
    53eda10eb [davidyuan] update
    
    Authored-by: davidyuan <yuanfuy...@mafengwo.com>
    Signed-off-by: Kent Yao <y...@apache.org>
---
 extensions/spark/kyuubi-spark-authz/pom.xml        |  19 ++++
 .../plugin/spark/authz/MysqlContainerEnv.scala     |  39 +++++++
 .../plugin/spark/authz/SparkSessionProvider.scala  |  32 ++++--
 .../IcebergCatalogRangerSparkExtensionSuite.scala  | 115 +++++++++++++++++++--
 .../authz/ranger/RangerSparkExtensionSuite.scala   |  30 +++++-
 5 files changed, 220 insertions(+), 15 deletions(-)

diff --git a/extensions/spark/kyuubi-spark-authz/pom.xml b/extensions/spark/kyuubi-spark-authz/pom.xml
index f540a8a7ee..32e1654f2e 100644
--- a/extensions/spark/kyuubi-spark-authz/pom.xml
+++ b/extensions/spark/kyuubi-spark-authz/pom.xml
@@ -37,6 +37,25 @@
     </properties>
 
     <dependencies>
+
+        <dependency>
+            <groupId>com.mysql</groupId>
+            <artifactId>mysql-connector-j</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>com.dimafeng</groupId>
+            <artifactId>testcontainers-scala-mysql_${scala.binary.version}</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>com.dimafeng</groupId>
+            <artifactId>testcontainers-scala-scalatest_${scala.binary.version}</artifactId>
+            <scope>test</scope>
+        </dependency>
+
         <dependency>
             <groupId>org.apache.kyuubi</groupId>
             <artifactId>kyuubi-util-scala_${scala.binary.version}</artifactId>
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/MysqlContainerEnv.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/MysqlContainerEnv.scala
new file mode 100644
index 0000000000..ef57604a6f
--- /dev/null
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/MysqlContainerEnv.scala
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kyuubi.plugin.spark.authz
+
+import com.dimafeng.testcontainers.MySQLContainer
+import org.testcontainers.utility.DockerImageName
+
+trait MysqlContainerEnv {
+
+  val containerDef: MySQLContainer = MySQLContainer.Def(
+    dockerImageName = DockerImageName.parse("mysql:5.7"),
+    databaseName = "hive_metastore",
+    username = "root",
+    password = "123456")
+    .createContainer()
+
+  def startEngine(): Unit = {
+    containerDef.start()
+  }
+
+  def stopEngine(): Unit = {
+    containerDef.stop()
+  }
+}
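
A minimal usage sketch for the new trait, assuming scalatest's BeforeAndAfterAll (the suite name is hypothetical):

    import org.scalatest.BeforeAndAfterAll
    import org.scalatest.funsuite.AnyFunSuite

    import org.apache.kyuubi.plugin.spark.authz.MysqlContainerEnv

    // Hypothetical suite: boots the mysql:5.7 container before any test runs
    // and tears it down afterwards.
    class MyJdbcBackedSuite extends AnyFunSuite
      with BeforeAndAfterAll with MysqlContainerEnv {

      override def beforeAll(): Unit = {
        startEngine() // containerDef.jdbcUrl/username/password are valid from here
        super.beforeAll()
      }

      override def afterAll(): Unit = {
        super.afterAll()
        stopEngine()
      }
    }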
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
index 7aa4d99e45..1fed6bc8b8 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
@@ -40,16 +40,20 @@ trait SparkSessionProvider {
 
   protected val extraSparkConf: SparkConf = new SparkConf()
 
+  protected val useMysqlEnv: Boolean = false
+
+  def getMysqlJdbcUrl: String = ""
+
+  def getMysqlUsername: String = ""
+
+  def getMysqlPassword: String = ""
+
+  def getDriverClassName: String = ""
+
   protected lazy val spark: SparkSession = {
-    val metastore = {
-      val path = Utils.createTempDir(prefix = "hms")
-      Files.deleteIfExists(path)
-      path
-    }
-    val ret = SparkSession.builder()
+    val sessionBuilder = SparkSession.builder()
       .master("local")
       .config("spark.ui.enabled", "false")
-      .config("javax.jdo.option.ConnectionURL", 
s"jdbc:derby:;databaseName=$metastore;create=true")
       .config("spark.sql.catalogImplementation", catalogImpl)
       .config(
         "spark.sql.warehouse.dir",
@@ -57,7 +61,19 @@ trait SparkSessionProvider {
       .config("spark.sql.extensions", sqlExtensions)
       .withExtensions(extension)
       .config(extraSparkConf)
-      .getOrCreate()
+
+    if (!useMysqlEnv) {
+      val metastore = {
+        val path = Utils.createTempDir(prefix = "hms")
+        Files.deleteIfExists(path)
+        path
+      }
+      sessionBuilder.config(
+        "javax.jdo.option.ConnectionURL",
+        s"jdbc:derby:;databaseName=$metastore;create=true")
+    }
+
+    val ret = sessionBuilder.getOrCreate()
     if (catalogImpl == "hive") {
       // Ensure HiveExternalCatalog.client.userName is defaultTableOwner
       UserGroupInformation.createRemoteUser(defaultTableOwner).doAs(
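
With this change the embedded Derby metastore is only configured when useMysqlEnv is false; a suite that wants the external metastore overrides the new accessors instead. A sketch under that assumption (trait name and connection values are illustrative):

    import org.apache.kyuubi.plugin.spark.authz.SparkSessionProvider

    // Hypothetical mix-in: opts out of the Derby default and points the
    // session provider at an external MySQL-backed metastore.
    trait MysqlBackedSessionProvider extends SparkSessionProvider {
      override protected val useMysqlEnv: Boolean = true
      override def getMysqlJdbcUrl: String = "jdbc:mysql://localhost:3306/hive_metastore"
      override def getMysqlUsername: String = "root"
      override def getMysqlPassword: String = "123456"
      override def getDriverClassName: String = "com.mysql.cj.jdbc.Driver"
    }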
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
index 24345624b5..16a8beb22c 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/IcebergCatalogRangerSparkExtensionSuite.scala
@@ -24,8 +24,8 @@ import scala.util.Try
 import org.apache.spark.sql.Row
 import org.scalatest.Outcome
 
-// scalastyle:off
 import org.apache.kyuubi.Utils
+// scalastyle:off
 import org.apache.kyuubi.plugin.spark.authz.AccessControlException
 import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
 import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
@@ -39,11 +39,12 @@ import org.apache.kyuubi.util.AssertionUtils._
  */
 @IcebergTest
 class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
-  override protected val catalogImpl: String = "hive"
+  override protected val useMysqlEnv: Boolean = true
+  override protected val catalogImpl: String = "in-memory"
   override protected val sqlExtensions: String =
     "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions"
 
-  val catalogV2 = "local"
+  val catalogV2 = "jdbc_catalog"
   val namespace1 = icebergNamespace
   val table1 = "table1"
   val outputTable1 = "outputTable1"
@@ -55,16 +56,20 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
   }
 
   override def beforeAll(): Unit = {
+    super.beforeAll()
     spark.conf.set(
       s"spark.sql.catalog.$catalogV2",
       "org.apache.iceberg.spark.SparkCatalog")
-    spark.conf.set(s"spark.sql.catalog.$catalogV2.type", "hadoop")
+    spark.conf.set(
+      s"spark.sql.catalog.$catalogV2.type",
+      "jdbc")
+    spark.conf.set(s"spark.sql.catalog.$catalogV2.uri", getMysqlJdbcUrl)
+    spark.conf.set(s"spark.sql.catalog.$catalogV2.jdbc.user", getMysqlUsername)
+    spark.conf.set(s"spark.sql.catalog.$catalogV2.jdbc.password", 
getMysqlPassword)
     spark.conf.set(
       s"spark.sql.catalog.$catalogV2.warehouse",
       Utils.createTempDir("iceberg-hadoop").toString)
 
-    super.beforeAll()
-
     doAs(admin, sql(s"CREATE DATABASE IF NOT EXISTS $catalogV2.$namespace1"))
     doAs(
       admin,
@@ -586,4 +591,102 @@ class IcebergCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
       doAs(admin, sql(dropTagSql))
     }
   }
+
+  test("RENAME TABLE for Iceberg") {
+    val table = "tablex"
+    withCleanTmpResources(Seq((table, "table"))) {
+      doAs(
+        admin,
+        sql(
+          s"CREATE TABLE $catalogV2.$namespace1.$table" +
+            s"(id int NOT NULL, name string, city string) USING iceberg"))
+      val renameSql = s"alter table $catalogV2.$namespace1.$table " +
+        s"rename to $namespace1.new_table"
+      interceptEndsWith[AccessControlException] {
+        doAs(someone, sql(renameSql))
+      }(s"does not have [alter] privilege on [$namespace1/tablex]")
+      doAs(admin, sql(renameSql))
+    }
+  }
+
+  test("ALTER TABLE CHANGE PROPERTIES for Iceberg") {
+    val table = "tablex"
+    withCleanTmpResources(Seq((s"$catalogV2.$namespace1.$table", "table"))) {
+      doAs(
+        admin,
+        sql(
+          s"CREATE TABLE $catalogV2.$namespace1.$table" +
+            s"(id int NOT NULL, name string, city string) USING iceberg"))
+      val alterTableSetPropertiesSql =
+        s"""
+           |ALTER TABLE $catalogV2.$namespace1.$table
+           |SET TBLPROPERTIES (
+           | 'read.split.target-size' = '123456'
+           |)
+           |""".stripMargin
+      val alterTableUnsetPropertiesSql =
+        s"""
+           |ALTER TABLE $catalogV2.$namespace1.$table
+           |UNSET TBLPROPERTIES (
+           | 'read.split.target-size'
+           | )
+           |""".stripMargin
+      interceptEndsWith[AccessControlException] {
+        doAs(someone, sql(alterTableSetPropertiesSql))
+      }(s"does not have [alter] privilege on [$namespace1/tablex]")
+      doAs(admin, sql(alterTableSetPropertiesSql))
+      interceptEndsWith[AccessControlException] {
+        doAs(someone, sql(alterTableUnsetPropertiesSql))
+      }(s"does not have [alter] privilege on [$namespace1/tablex]")
+      doAs(admin, sql(alterTableUnsetPropertiesSql))
+    }
+  }
+
+  test("ALTER TABLE CHANGE COLUMN for Iceberg") {
+    val table = "tablex"
+    withCleanTmpResources(Seq((s"$catalogV2.$namespace1.$table", "table"))) {
+      doAs(
+        admin,
+        sql(
+          s"CREATE TABLE $catalogV2.$namespace1.$table" +
+            s"(id int NOT NULL, name string, city string) USING iceberg"))
+      val alterTableAddColumnSql =
+        s"""
+           |ALTER TABLE $catalogV2.$namespace1.$table
+           |ADD COLUMN country int;
+           |""".stripMargin
+      val alterTableRenameColumnSql =
+        s"""
+           |ALTER TABLE $catalogV2.$namespace1.$table
+           |RENAME COLUMN country to country_code;
+           |""".stripMargin
+      val alterTableAlterColumnSql =
+        s"""
+           |ALTER TABLE $catalogV2.$namespace1.$table
+           |ALTER COLUMN country_code type bigint;
+           |""".stripMargin
+      val alterTableDropColumnSql =
+        s"""
+           |ALTER TABLE $catalogV2.$namespace1.$table
+           |DROP COLUMN country_code;
+           |""".stripMargin
+      interceptEndsWith[AccessControlException] {
+        doAs(someone, sql(alterTableAddColumnSql))
+      }(s"does not have [alter] privilege on [$namespace1/tablex]")
+      doAs(admin, sql(alterTableAddColumnSql))
+      interceptEndsWith[AccessControlException] {
+        doAs(someone, sql(alterTableRenameColumnSql))
+      }(s"does not have [alter] privilege on [$namespace1/tablex]")
+      doAs(admin, sql(alterTableRenameColumnSql))
+      interceptEndsWith[AccessControlException] {
+        doAs(someone, sql(alterTableAlterColumnSql))
+      }(s"does not have [alter] privilege on [$namespace1/tablex]")
+      doAs(admin, sql(alterTableAlterColumnSql))
+      interceptEndsWith[AccessControlException] {
+        doAs(someone, sql(alterTableDropColumnSql))
+      }(s"does not have [alter] privilege on [$namespace1/tablex]")
+      doAs(admin, sql(alterTableDropColumnSql))
+    }
+  }
+
 }
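
For reference, the catalog wiring done in beforeAll above amounts to the following static configuration; a hedged sketch in which the MySQL coordinates and warehouse path are placeholders (Iceberg forwards "jdbc."-prefixed keys to the JDBC driver):

    import org.apache.spark.sql.SparkSession

    // Sketch: an Iceberg SparkCatalog named "jdbc_catalog" backed by Iceberg's
    // JDBC catalog; URI, credentials and warehouse are illustrative only.
    val spark = SparkSession.builder()
      .master("local")
      .config("spark.sql.extensions",
        "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions")
      .config("spark.sql.catalog.jdbc_catalog", "org.apache.iceberg.spark.SparkCatalog")
      .config("spark.sql.catalog.jdbc_catalog.type", "jdbc")
      .config("spark.sql.catalog.jdbc_catalog.uri", "jdbc:mysql://localhost:3306/hive_metastore")
      .config("spark.sql.catalog.jdbc_catalog.jdbc.user", "root")
      .config("spark.sql.catalog.jdbc_catalog.jdbc.password", "123456")
      .config("spark.sql.catalog.jdbc_catalog.warehouse", "/tmp/iceberg-warehouse")
      .getOrCreate()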
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
index 69491cab30..4e8cddb68c 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtensionSuite.scala
@@ -38,6 +38,7 @@ import org.scalatest.funsuite.AnyFunSuite
 
 import org.apache.kyuubi.Utils
 import org.apache.kyuubi.plugin.spark.authz.{AccessControlException, SparkSessionProvider}
+import org.apache.kyuubi.plugin.spark.authz.MysqlContainerEnv
 import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
 import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
 import org.apache.kyuubi.plugin.spark.authz.rule.Authorization.KYUUBI_AUTHZ_TAG
@@ -45,13 +46,40 @@ import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
 import org.apache.kyuubi.util.AssertionUtils._
 import org.apache.kyuubi.util.reflect.ReflectUtils._
 abstract class RangerSparkExtensionSuite extends AnyFunSuite
-  with SparkSessionProvider with BeforeAndAfterAll {
+  with SparkSessionProvider with BeforeAndAfterAll with MysqlContainerEnv {
   // scalastyle:on
   override protected val extension: SparkSessionExtensions => Unit = new RangerSparkExtension
 
+  var mysqlJdbcUrl = ""
+  var mysqlUsername = ""
+  var mysqlPassword = ""
+  var driverClassName = ""
+
+  override def getMysqlJdbcUrl: String = mysqlJdbcUrl
+
+  override def getMysqlUsername: String = mysqlUsername
+
+  override def getMysqlPassword: String = mysqlPassword
+
+  override def getDriverClassName: String = driverClassName
+
   override def afterAll(): Unit = {
     spark.stop()
     super.afterAll()
+    if (useMysqlEnv) {
+      stopEngine()
+    }
+  }
+
+  override def beforeAll(): Unit = {
+    if (useMysqlEnv) {
+      startEngine()
+      this.mysqlJdbcUrl = containerDef.jdbcUrl
+      this.mysqlUsername = containerDef.username
+      this.mysqlPassword = containerDef.password
+      this.driverClassName = containerDef.driverClassName
+    }
+    super.beforeAll()
   }
 
   protected def errorMessage(
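
A concrete suite only needs to flip the flag; the base class now starts the container and captures its JDBC coordinates before super.beforeAll() runs, so subclasses (like the Iceberg suite above) can read them when configuring catalogs. A sketch with a hypothetical suite name:

    // Hypothetical subclass: opting into the MySQL-backed environment is a
    // one-line override; container lifecycle and ordering are handled by the
    // base class.
    class MyMysqlBackedRangerSuite extends RangerSparkExtensionSuite {
      override protected val useMysqlEnv: Boolean = true
      override protected val catalogImpl: String = "in-memory"
      override protected val sqlExtensions: String = ""
    }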
