This is an automated email from the ASF dual-hosted git repository.
bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 1b229b63a [KYUUBI #5323] [AUTHZ] Drop Hive and Iceberg tables with PURGE option in tests
1b229b63a is described below
commit 1b229b63a0f101d495057cdbee6f81ee8b426006
Author: Bowen Liang <[email protected]>
AuthorDate: Fri Oct 13 14:46:53 2023 +0800
[KYUUBI #5323] [AUTHZ] Drop Hive and Iceberg tables with PURGE option in tests
### _Why are the changes needed?_
- `DROP TABLE` for Iceberg tables only removes the table from the catalog by default, which may contaminate other tests that reuse the same table name
- Enable the PURGE option when dropping Iceberg and Hive tables
  - Iceberg Spark DDL: `DROP TABLE ... PURGE`
  - Drops the table from the catalog and also deletes the table's contents, as sketched below
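
A minimal sketch of the behavioral difference, assuming a hypothetical Iceberg catalog `iceberg_catalog` and table `db.tbl` (names are illustrative only):

```scala
// Without PURGE: Iceberg's DROP TABLE removes only the catalog entry by
// default, leaving the table's data and metadata files behind, where a
// later test recreating the same table name can pick them up.
spark.sql("DROP TABLE IF EXISTS iceberg_catalog.db.tbl")

// With PURGE: the catalog entry is dropped and the table's contents
// (data and metadata files) are deleted as well.
spark.sql("DROP TABLE IF EXISTS iceberg_catalog.db.tbl PURGE")
```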
### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly, including negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
### _Was this patch authored or co-authored using generative AI tooling?_
Closes #5323 from bowenliang123/iceberg-purge.
Closes #5323
ce4188dd2 [Bowen Liang] purge
Authored-by: Bowen Liang <[email protected]>
Signed-off-by: Bowen Liang <[email protected]>
---
.../plugin/spark/authz/SparkSessionProvider.scala | 18 +++++++++++++++++-
.../V2JdbcTableCatalogPrivilegesBuilderSuite.scala | 10 +++++++---
.../V2JdbcTableCatalogRangerSparkExtensionSuite.scala | 5 ++---
.../ranger/datamasking/DataMaskingForJDBCV2Suite.scala | 6 +++---
.../rowfiltering/RowFilteringForJDBCV2Suite.scala | 6 +++---
.../apache/kyuubi/operation/IcebergMetadataTests.scala | 2 +-
6 files changed, 33 insertions(+), 14 deletions(-)
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
index e6f70b4d1..c7e541ef5 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
@@ -27,6 +27,7 @@ import org.scalatest.Assertions._
import org.apache.kyuubi.Utils
import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
+import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
trait SparkSessionProvider {
@@ -79,7 +80,15 @@ trait SparkSessionProvider {
f
} finally {
res.foreach {
- case (t, "table") => doAs(admin, sql(s"DROP TABLE IF EXISTS $t"))
+ case (t, "table") => doAs(
+ admin, {
+ val purgeOption =
+ if (isSparkV32OrGreater && isCatalogSupportPurge(
+ spark.sessionState.catalogManager.currentCatalog.name())) {
+ "PURGE"
+ } else ""
+ sql(s"DROP TABLE IF EXISTS $t $purgeOption")
+ })
case (db, "database") => doAs(admin, sql(s"DROP DATABASE IF EXISTS
$db"))
case (fn, "function") => doAs(admin, sql(s"DROP FUNCTION IF EXISTS
$fn"))
case (view, "view") => doAs(admin, sql(s"DROP VIEW IF EXISTS $view"))
@@ -96,4 +105,11 @@ trait SparkSessionProvider {
doAs(user, assert(sql(query).collect() === result))
}
+ private def isCatalogSupportPurge(catalogName: String): Boolean = {
+ val unsupportedCatalogs = Set(v2JdbcTableCatalogClassName)
+ spark.conf.getOption(s"spark.sql.catalog.$catalogName") match {
+ case Some(catalog) if !unsupportedCatalogs.contains(catalog) => true
+ case _ => false
+ }
+ }
}
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala
index 4fe13201d..d1a6f4ae8 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala
@@ -22,6 +22,7 @@ import scala.util.Try
import org.scalatest.Outcome
+import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
import org.apache.kyuubi.plugin.spark.authz.serde._
import org.apache.kyuubi.util.AssertionUtils._
@@ -38,9 +39,7 @@ class V2JdbcTableCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite
val jdbcUrl: String = s"$dbUrl;create=true"
override def beforeAll(): Unit = {
- spark.conf.set(
- s"spark.sql.catalog.$catalogV2",
- "org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog")
+ spark.conf.set(s"spark.sql.catalog.$catalogV2",
v2JdbcTableCatalogClassName)
spark.conf.set(s"spark.sql.catalog.$catalogV2.url", jdbcUrl)
spark.conf.set(
s"spark.sql.catalog.$catalogV2.driver",
@@ -170,3 +169,8 @@ class V2JdbcTableCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite
}
}
}
+
+object V2JdbcTableCatalogPrivilegesBuilderSuite {
+ val v2JdbcTableCatalogClassName: String =
+ "org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog"
+}
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
index 253880bbf..046052d55 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
@@ -24,6 +24,7 @@ import scala.util.Try
import org.apache.kyuubi.plugin.spark.authz.AccessControlException
import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
+import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
/**
@@ -44,9 +45,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSu
val jdbcUrl: String = s"$dbUrl;create=true"
override def beforeAll(): Unit = {
- spark.conf.set(
- s"spark.sql.catalog.$catalogV2",
- "org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog")
+ spark.conf.set(s"spark.sql.catalog.$catalogV2",
v2JdbcTableCatalogClassName)
spark.conf.set(s"spark.sql.catalog.$catalogV2.url", jdbcUrl)
spark.conf.set(
s"spark.sql.catalog.$catalogV2.driver",
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala
index 249d90352..411d98cf9 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala
@@ -23,13 +23,13 @@ import scala.util.Try
import org.apache.spark.SparkConf
import org.scalatest.Outcome
+import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
+
class DataMaskingForJDBCV2Suite extends DataMaskingTestBase {
override protected val extraSparkConf: SparkConf = {
new SparkConf()
.set("spark.sql.defaultCatalog", "testcat")
- .set(
- "spark.sql.catalog.testcat",
- "org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog")
+ .set("spark.sql.catalog.testcat", v2JdbcTableCatalogClassName)
.set(s"spark.sql.catalog.testcat.url",
"jdbc:derby:memory:testcat;create=true")
.set(
s"spark.sql.catalog.testcat.driver",
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala
index 7d20d0515..bfe1cd9e4 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala
@@ -24,13 +24,13 @@ import scala.util.Try
import org.apache.spark.SparkConf
import org.scalatest.Outcome
+import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
+
class RowFilteringForJDBCV2Suite extends RowFilteringTestBase {
override protected val extraSparkConf: SparkConf = {
new SparkConf()
.set("spark.sql.defaultCatalog", "testcat")
- .set(
- "spark.sql.catalog.testcat",
- "org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog")
+ .set("spark.sql.catalog.testcat", v2JdbcTableCatalogClassName)
.set(s"spark.sql.catalog.testcat.url",
"jdbc:derby:memory:testcat;create=true")
.set(
s"spark.sql.catalog.testcat.driver",
diff --git a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/IcebergMetadataTests.scala b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/IcebergMetadataTests.scala
index 99482f0c5..814c08343 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/IcebergMetadataTests.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/IcebergMetadataTests.scala
@@ -133,7 +133,7 @@ trait IcebergMetadataTests extends HiveJDBCTestHelper with IcebergSuiteMixin wit
}
assert(!rs1.next())
} finally {
- statement.execute(s"DROP TABLE IF EXISTS $cg.$db.tbl")
+ statement.execute(s"DROP TABLE IF EXISTS $cg.$db.tbl PURGE")
}
}
}