This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 8a67796984 [KYUUBI #7256] Enable authZ compile support for Spark 4.0 and refactor some test methods
8a67796984 is described below
commit 8a67796984b54abaebe0f0cbeddb7c059f51e531
Author: Cheng Pan <[email protected]>
AuthorDate: Mon Nov 24 19:38:32 2025 +0800
[KYUUBI #7256] Enable authZ compile support for Spark 4.0 and refactor some test methods
### Why are the changes needed?
This PR enables authZ compile support for Spark 4.0. Before this change, the following build command failed:
```
build/mvn -Pspark-4.0 -Pscala-2.13 -pl extensions/spark/kyuubi-spark-authz -am install -DskipTests
```
```
[ERROR] [Error] /Users/chengpan/Projects/apache-kyuubi/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala:19: object Strategy is not a member of package org.apache.spark.sql
[ERROR] [Error] /Users/chengpan/Projects/apache-kyuubi/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala:23: not found: type Strategy
[ERROR] [Error] /Users/chengpan/Projects/apache-kyuubi/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtension.scala:58: type mismatch;
 found   : org.apache.kyuubi.plugin.spark.authz.rule.rowfilter.FilterDataSourceV2Strategy.type
 required: v1.StrategyBuilder
    (which expands to)  org.apache.spark.sql.SparkSession => org.apache.spark.sql.execution.SparkStrategy
[ERROR] three errors found
```
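As the errors above indicate, Spark 4.0 no longer exposes the `org.apache.spark.sql.Strategy` alias that Spark 3.x defined for `org.apache.spark.sql.execution.SparkStrategy`, so the fix extends `SparkStrategy` directly, which compiles against both release lines. A minimal sketch (the class name is illustrative, not part of the patch):
```
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.{SparkPlan, SparkStrategy}

// Extending SparkStrategy directly works on both Spark 3.x and 4.0;
// returning Nil tells the planner this strategy does not handle the plan.
case class ExampleStrategy(spark: SparkSession) extends SparkStrategy {
  override def apply(plan: LogicalPlan): Seq[SparkPlan] = Nil
}
```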
In addition, it refactors two methods in the test helper trait
`SparkSessionProvider`:
1. Refactor `isCatalogSupportPurge` into an overridable method `supportPurge`
(default `true`), because some UTs do not rely on the current catalog.
2. Add a new curried helper method `def doAs[T](user: String)(f: => T): T`, so
callers can write (a fuller usage sketch follows the example):
```
doAs("someone") {
...
}
```
### How was this patch tested?
Pass GHA to ensure it breaks nothing; manually verified that the Spark 4.0 compilation now succeeds:
```
build/mvn -Pspark-4.0 -Pscala-2.13 -pl extensions/spark/kyuubi-spark-authz -am install -DskipTests
```
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #7256 from pan3793/authz-refactor.
Closes #7256
b84cec803 [Cheng Pan] add missing override
ede364f17 [Cheng Pan] Enable authZ compile support for Spark 4.0 and refactor some test methods
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
---
.../rowfilter/FilterDataSourceV2Strategy.scala | 5 ++--
.../plugin/spark/authz/SparkSessionProvider.scala | 34 +++++++++-------------
.../DeltaCatalogRangerSparkExtensionSuite.scala | 1 +
.../PaimonCatalogRangerSparkExtensionSuite.scala | 1 +
...JdbcTableCatalogRangerSparkExtensionSuite.scala | 1 +
.../datamasking/DataMaskingForJDBCV2Suite.scala | 2 ++
.../rowfiltering/RowFilteringForJDBCV2Suite.scala | 2 ++
7 files changed, 24 insertions(+), 22 deletions(-)
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala
index 18e2342e36..07776a2916 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala
@@ -16,11 +16,12 @@
*/
package org.apache.kyuubi.plugin.spark.authz.rule.rowfilter
-import org.apache.spark.sql.{SparkSession, Strategy}
+import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.SparkPlan
+import org.apache.spark.sql.execution.SparkStrategy
-case class FilterDataSourceV2Strategy(spark: SparkSession) extends Strategy {
+case class FilterDataSourceV2Strategy(spark: SparkSession) extends SparkStrategy {
override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case ObjectFilterPlaceHolder(child) if child.nodeName == "ShowNamespaces" =>
spark.sessionState.planner.plan(child)
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
index f3ecc90f12..6a9e12701d 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
@@ -27,11 +27,10 @@ import org.scalatest.Assertions._
import org.apache.kyuubi.Utils
import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
-import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
-import org.apache.kyuubi.plugin.spark.authz.ranger.DeltaCatalogRangerSparkExtensionSuite._
trait SparkSessionProvider {
protected val catalogImpl: String
+ protected def supportPurge: Boolean = true
protected def format: String = if (catalogImpl == "hive") "hive" else "parquet"
protected val extension: SparkSessionExtensions => Unit = _ => ()
@@ -85,26 +84,29 @@ trait SparkSessionProvider {
protected val sql: String => DataFrame = spark.sql
protected def doAs[T](user: String, f: => T): T = {
UserGroupInformation.createRemoteUser(user).doAs[T](
new PrivilegedExceptionAction[T] {
override def run(): T = f
})
}
+
+ protected def doAs[T](user: String)(f: => T): T = {
+ UserGroupInformation.createRemoteUser(user).doAs[T](
+ new PrivilegedExceptionAction[T] {
+ override def run(): T = f
+ })
+ }
+
protected def withCleanTmpResources[T](res: Seq[(String, String)])(f: => T): T = {
try {
f
} finally {
res.foreach {
- case (t, "table") => doAs(
- admin, {
- val purgeOption =
- if (isCatalogSupportPurge(
- spark.sessionState.catalogManager.currentCatalog.name())) {
- "PURGE"
- } else ""
- sql(s"DROP TABLE IF EXISTS $t $purgeOption")
- })
+ case (t, "table") => doAs(admin) {
+ val purgeOption = if (supportPurge) "PURGE" else ""
+ sql(s"DROP TABLE IF EXISTS $t $purgeOption")
+ }
case (db, "database") => doAs(admin, sql(s"DROP DATABASE IF EXISTS
$db"))
case (fn, "function") => doAs(admin, sql(s"DROP FUNCTION IF EXISTS
$fn"))
case (view, "view") => doAs(admin, sql(s"DROP VIEW IF EXISTS $view"))
@@ -118,12 +120,4 @@ trait SparkSessionProvider {
protected def checkAnswer(user: String, query: String, result: Seq[Row]): Unit = {
doAs(user, assert(sql(query).collect() === result))
}
-
- private def isCatalogSupportPurge(catalogName: String): Boolean = {
- val unsupportedCatalogs = Set(v2JdbcTableCatalogClassName, deltaCatalogClassName)
- spark.conf.getOption(s"spark.sql.catalog.$catalogName") match {
- case Some(catalog) if !unsupportedCatalogs.contains(catalog) => true
- case _ => false
- }
- }
}
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
index 595f243a0f..75af93c7c6 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
@@ -35,6 +35,7 @@ import org.apache.kyuubi.util.AssertionUtils._
@DeltaTest
class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
override protected val catalogImpl: String = "hive"
+ override protected val supportPurge: Boolean = false
override protected val sqlExtensions: String =
"io.delta.sql.DeltaSparkSessionExtension"
val namespace1 = deltaNamespace
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
index 44249e58a4..bce206409d 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
@@ -31,6 +31,7 @@ import org.apache.kyuubi.util.AssertionUtils._
@PaimonTest
class PaimonCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
override protected val catalogImpl: String = "hive"
+ override protected val supportPurge: Boolean = false
private def isSupportedVersion = isScalaV212
override protected val sqlExtensions: String =
if (isSupportedVersion)
"org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions"
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
@@ -33,6 +33,7 @@ import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
*/
class V2JdbcTableCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
override protected val catalogImpl: String = "in-memory"
+ override protected val supportPurge: Boolean = false
val catalogV2 = "testcat"
val jdbcCatalogV2 = "jdbc2"
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala
@@ -37,6 +37,8 @@ class DataMaskingForJDBCV2Suite extends DataMaskingTestBase {
override protected val catalogImpl: String = "in-memory"
+ override protected val supportPurge: Boolean = false
+
override protected def format: String = ""
override def beforeAll(): Unit = {
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala
@@ -38,6 +38,8 @@ class RowFilteringForJDBCV2Suite extends RowFilteringTestBase {
override protected val catalogImpl: String = "in-memory"
+ override protected val supportPurge: Boolean = false
+
override protected def format: String = ""
override def beforeAll(): Unit = {