This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch branch-1.10
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/branch-1.10 by this push:
new e9f4864b1c [KYUUBI #7256] Enable authZ compile support for Spark 4.0
and refactor some test methods
e9f4864b1c is described below
commit e9f4864b1cbbaf5174b2bc855d807eb211b0c60e
Author: Cheng Pan <[email protected]>
AuthorDate: Mon Nov 24 19:38:32 2025 +0800
[KYUUBI #7256] Enable authZ compile support for Spark 4.0 and refactor some
test methods
This PR enables authZ compile support for Spark 4.0
```
build/mvn -Pspark-4.0 -Pscala-2.13 -pl extensions/spark/kyuubi-spark-authz
-am install -DskipTests
```
```
[ERROR] [Error]
/Users/chengpan/Projects/apache-kyuubi/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala:19:
object Strategy is not a member of package org.apache.spark.sql
[ERROR] [Error]
/Users/chengpan/Projects/apache-kyuubi/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala:23:
not found: type Strategy
[ERROR] [Error]
/Users/chengpan/Projects/apache-kyuubi/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RangerSparkExtension.scala:58:
type mismatch;
found :
org.apache.kyuubi.plugin.spark.authz.rule.rowfilter.FilterDataSourceV2Strategy.type
required: v1.StrategyBuilder
(which expands to) org.apache.spark.sql.SparkSession =>
org.apache.spark.sql.execution.SparkStrategy
[ERROR] three errors found
```
In addition, it refactors two methods in the test helper class
`SparkSessionProvider`
1. Refactor `isCatalogSupportPurge` to an overridable method `supportPurge`
(defaulting to `true`), because some UTs do not rely on the current catalog.
2. Add a new helper method `def doAs[T](user: String)(f: => T): T`, so that
callers can now write
doAs("someone") {
...
}
```
Pass GHA to ensure it breaks nothing; manually tested the Spark 4.0 compile
```
build/mvn -Pspark-4.0 -Pscala-2.13 -pl extensions/spark/kyuubi-spark-authz
-am install -DskipTests
```
No.
Closes #7256 from pan3793/authz-refactor.
Closes #7256
b84cec803 [Cheng Pan] add missing override
ede364f17 [Cheng Pan] Enable authZ compile support for Spark 4.0 and
refactor some test methods
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
(cherry picked from commit 8a67796984b54abaebe0f0cbeddb7c059f51e531)
Signed-off-by: Cheng Pan <[email protected]>
---
.../rowfilter/FilterDataSourceV2Strategy.scala | 5 ++--
.../plugin/spark/authz/SparkSessionProvider.scala | 34 +++++++++-------------
.../DeltaCatalogRangerSparkExtensionSuite.scala | 1 +
.../PaimonCatalogRangerSparkExtensionSuite.scala | 1 +
...JdbcTableCatalogRangerSparkExtensionSuite.scala | 1 +
.../datamasking/DataMaskingForJDBCV2Suite.scala | 2 ++
.../rowfiltering/RowFilteringForJDBCV2Suite.scala | 2 ++
7 files changed, 24 insertions(+), 22 deletions(-)
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala
index e268ed6bc7..bde5757407 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/rule/rowfilter/FilterDataSourceV2Strategy.scala
@@ -16,11 +16,12 @@
*/
package org.apache.kyuubi.plugin.spark.authz.rule.rowfilter
-import org.apache.spark.sql.{SparkSession, Strategy}
+import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project}
import org.apache.spark.sql.execution.SparkPlan
+import org.apache.spark.sql.execution.SparkStrategy
-case class FilterDataSourceV2Strategy(spark: SparkSession) extends Strategy {
+case class FilterDataSourceV2Strategy(spark: SparkSession) extends
SparkStrategy {
override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
// For Spark 3.1 and below, `ColumnPruning` rule will set
`ObjectFilterPlaceHolder#child` to
// `Project`
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
index 7aa4d99e45..af0d1950ca 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
@@ -27,12 +27,11 @@ import org.scalatest.Assertions._
import org.apache.kyuubi.Utils
import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
-import
org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
-import
org.apache.kyuubi.plugin.spark.authz.ranger.DeltaCatalogRangerSparkExtensionSuite._
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
trait SparkSessionProvider {
protected val catalogImpl: String
+ protected def supportPurge: Boolean = true
protected def format: String = if (catalogImpl == "hive") "hive" else
"parquet"
protected val extension: SparkSessionExtensions => Unit = _ => ()
@@ -70,26 +69,29 @@ trait SparkSessionProvider {
protected val sql: String => DataFrame = spark.sql
protected def doAs[T](user: String, f: => T): T = {
UserGroupInformation.createRemoteUser(user).doAs[T](
new PrivilegedExceptionAction[T] {
override def run(): T = f
})
}
+
+ protected def doAs[T](user: String)(f: => T): T = {
+ UserGroupInformation.createRemoteUser(user).doAs[T](
+ new PrivilegedExceptionAction[T] {
+ override def run(): T = f
+ })
+ }
+
protected def withCleanTmpResources[T](res: Seq[(String, String)])(f: => T):
T = {
try {
f
} finally {
res.foreach {
- case (t, "table") => doAs(
- admin, {
- val purgeOption =
- if (isSparkV32OrGreater && isCatalogSupportPurge(
- spark.sessionState.catalogManager.currentCatalog.name())) {
- "PURGE"
- } else ""
- sql(s"DROP TABLE IF EXISTS $t $purgeOption")
- })
+ case (t, "table") => doAs(admin) {
+ val purgeOption = if (isSparkV32OrGreater && supportPurge) "PURGE"
else ""
+ sql(s"DROP TABLE IF EXISTS $t $purgeOption")
+ }
case (db, "database") => doAs(admin, sql(s"DROP DATABASE IF EXISTS
$db"))
case (fn, "function") => doAs(admin, sql(s"DROP FUNCTION IF EXISTS
$fn"))
case (view, "view") => doAs(admin, sql(s"DROP VIEW IF EXISTS $view"))
@@ -105,12 +107,4 @@ trait SparkSessionProvider {
protected def checkAnswer(user: String, query: String, result: Seq[Row]):
Unit = {
doAs(user, assert(sql(query).collect() === result))
}
-
- private def isCatalogSupportPurge(catalogName: String): Boolean = {
- val unsupportedCatalogs = Set(v2JdbcTableCatalogClassName,
deltaCatalogClassName)
- spark.conf.getOption(s"spark.sql.catalog.$catalogName") match {
- case Some(catalog) if !unsupportedCatalogs.contains(catalog) => true
- case _ => false
- }
- }
}
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
index dbf88d7d02..b2c3b2e5ad 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/DeltaCatalogRangerSparkExtensionSuite.scala
@@ -35,6 +35,7 @@ import org.apache.kyuubi.util.AssertionUtils._
@DeltaTest
class DeltaCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite {
override protected val catalogImpl: String = "hive"
+ override protected val supportPurge: Boolean = false
override protected val sqlExtensions: String =
"io.delta.sql.DeltaSparkSessionExtension"
val namespace1 = deltaNamespace
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
index 841520aeb9..3f3aafa033 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/PaimonCatalogRangerSparkExtensionSuite.scala
@@ -31,6 +31,7 @@ import org.apache.kyuubi.util.AssertionUtils._
@PaimonTest
class PaimonCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSuite
{
override protected val catalogImpl: String = "hive"
+ override protected val supportPurge: Boolean = false
private def isSupportedVersion = isScalaV212
val catalogV2 = "paimon_catalog"
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
index 49d51e5d24..05c46f7359 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala
@@ -33,6 +33,7 @@ import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
*/
class V2JdbcTableCatalogRangerSparkExtensionSuite extends
RangerSparkExtensionSuite {
override protected val catalogImpl: String = "in-memory"
+ override protected val supportPurge: Boolean = false
val catalogV2 = "testcat"
val jdbcCatalogV2 = "jdbc2"
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala
index d25c4bda09..a6af1671b3 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala
@@ -37,6 +37,8 @@ class DataMaskingForJDBCV2Suite extends DataMaskingTestBase {
override protected val catalogImpl: String = "in-memory"
+ override protected val supportPurge: Boolean = false
+
override protected def format: String = ""
override def beforeAll(): Unit = {
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala
index 5305c7bd46..cec33c2935 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala
@@ -38,6 +38,8 @@ class RowFilteringForJDBCV2Suite extends RowFilteringTestBase
{
override protected val catalogImpl: String = "in-memory"
+ override protected val supportPurge: Boolean = false
+
override protected def format: String = ""
override def beforeAll(): Unit = {