This is an automated email from the ASF dual-hosted git repository.
bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 5de3a4069 [KYUUBI #5189] [AUTHZ] Make spark authz module compilable on Scala 2.13
5de3a4069 is described below
commit 5de3a4069089cc7d8f4b97d0a5d25331e427dddb
Author: liangbowen <[email protected]>
AuthorDate: Wed Aug 23 08:41:46 2023 +0800
[KYUUBI #5189] [AUTHZ] Make spark authz module compilable on Scala 2.13
### _Why are the changes needed?_
- Make spark authz module compilable on Scala 2.13
### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly, including negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
### _Was this patch authored or co-authored using generative AI tooling?_
No.
Closes #5189 from bowenliang123/scala213-authz.
Closes #5189
c5ee61a24 [liangbowen] adapt spark authz module to 2.13
Authored-by: liangbowen <[email protected]>
Signed-off-by: liangbowen <[email protected]>
---
.../apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala | 2 +-
.../kyuubi/plugin/spark/authz/ranger/RuleAuthorization.scala | 4 ++--
.../kyuubi/plugin/spark/authz/SparkSessionProvider.scala | 2 +-
.../kyuubi/plugin/spark/authz/V2CommandsPrivilegesSuite.scala | 2 +-
.../spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala | 10 +++++-----
5 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
index 08dc49d49..5c496b874 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
@@ -209,7 +209,7 @@ object PrivilegesBuilder {
}
}
- type PrivilegesAndOpType = (Seq[PrivilegeObject], Seq[PrivilegeObject], OperationType)
+ type PrivilegesAndOpType = (Iterable[PrivilegeObject], Iterable[PrivilegeObject], OperationType)
/**
* Build input privilege objects from a Spark's LogicalPlan for hive permanent udf
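
For context, a minimal sketch (hypothetical names, not from this patch) of why the alias is widened: on Scala 2.13 the default `Seq` aliases `scala.collection.immutable.Seq`, so mutable buffers such as `ArrayBuffer` no longer conform to it, while `Iterable` still aliases `scala.collection.Iterable` and accepts them without a copy:

```scala
import scala.collection.mutable.ArrayBuffer

object IterableAliasDemo extends App {
  // Shape mirrors PrivilegesAndOpType; String stands in for the real element types.
  type PrivsAndOp = (Iterable[String], Iterable[String], String)

  val inputs = ArrayBuffer("db1.table1")
  // val bad: Seq[String] = inputs            // 2.13: type mismatch, Seq is immutable.Seq
  val ok: PrivsAndOp = (inputs, Nil, "QUERY") // ArrayBuffer <: Iterable, so this compiles
  println(ok)
}
```
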
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleAuthorization.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleAuthorization.scala
index 3d53174f3..3203108df 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleAuthorization.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/ranger/RuleAuthorization.scala
@@ -53,7 +53,7 @@ object RuleAuthorization {
requests += AccessRequest(resource, ugi, opType, AccessType.USE)
}
- def addAccessRequest(objects: Seq[PrivilegeObject], isInput: Boolean): Unit = {
+ def addAccessRequest(objects: Iterable[PrivilegeObject], isInput: Boolean): Unit = {
objects.foreach { obj =>
val resource = AccessResource(obj, opType)
val accessType = ranger.AccessType(obj, opType, isInput)
@@ -84,7 +84,7 @@ object RuleAuthorization {
}
case _ => Seq(request)
}
- }
+ }.toSeq
if (authorizeInSingleCall) {
verify(requestArrays.flatten, auditHandler)
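
The added `.toSeq` follows from the same 2.13 change: presumably the collection being mapped here is mutable, and mapping it yields a mutable result that no longer conforms to the now-immutable `Seq`. A minimal sketch under that assumption (hypothetical names):

```scala
import scala.collection.mutable.ArrayBuffer

object ToSeqDemo extends App {
  val requests = ArrayBuffer(1, 2, 3)

  // Mapping an ArrayBuffer yields an ArrayBuffer; on 2.13 that is not a scala.Seq
  // (immutable), so the explicit .toSeq conversion is required.
  val requestArrays: Seq[Seq[Int]] = requests.map {
    case n if n % 2 == 0 => Seq(n, n)
    case n               => Seq(n)
  }.toSeq

  def verify(batch: Seq[Int]): Unit = println(batch)
  verify(requestArrays.flatten)
}
```
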
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
index 774dbd8b5..e6f70b4d1 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala
@@ -33,7 +33,7 @@ trait SparkSessionProvider {
protected val catalogImpl: String
protected def format: String = if (catalogImpl == "hive") "hive" else "parquet"
- protected val extension: SparkSessionExtensions => Unit = _ => Unit
+ protected val extension: SparkSessionExtensions => Unit = _ => ()
protected val sqlExtensions: String = ""
protected val extraSparkConf: SparkConf = new SparkConf()
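
A note on this one-character fix: in expression position, `Unit` denotes the companion object rather than the unit value; Scala 2.13 deprecates using the companion as a value (an error under fatal warnings), so the no-op default must return the literal `()`:

```scala
object UnitDemo extends App {
  val noOp: String => Unit = _ => ()  // correct: returns the unit value
  // val wrong: String => Unit = _ => Unit  // 2.13: deprecated use of the Unit companion as a value
  noOp("ignored")
}
```
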
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2CommandsPrivilegesSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2CommandsPrivilegesSuite.scala
index a1b5f366e..3ebea1ce9 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2CommandsPrivilegesSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2CommandsPrivilegesSuite.scala
@@ -670,7 +670,7 @@ abstract class V2CommandsPrivilegesSuite extends PrivilegesBuilderSuite {
val spec = DB_COMMAND_SPECS(plan1.getClass.getName)
var db: Database = null
spec.databaseDescs.find { d =>
- Try(db = d.extract(plan1)).isSuccess
+ Try { db = d.extract(plan1) }.isSuccess
}
withClue(sql1) {
assert(db.catalog === None)
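
The brace change matters because `Try(db = ...)` parses as a named argument `db` passed to `Try.apply`; Scala 2.12 still tolerated the parenthesized form as an assignment, but 2.13 drops that fallback and rejects `db` as an unknown parameter name. A block forces the assignment reading. A minimal sketch (hypothetical names):

```scala
import scala.util.Try

object TryBlockDemo extends App {
  def extract(plan: String): String =
    if (plan.nonEmpty) plan.toUpperCase else throw new NoSuchElementException

  var db: String = null
  // Try(db = extract("plan"))  // fails on 2.13: `db` is treated as a (nonexistent) parameter name
  val ok = Try { db = extract("plan") }.isSuccess // block: runs the assignment inside Try[Unit]
  println((ok, db))
}
```
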
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala
index 582b91fd9..1037d9811 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala
@@ -79,7 +79,7 @@ class V2JdbcTableCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite
val spec = TABLE_COMMAND_SPECS(plan.getClass.getName)
var table: Table = null
spec.tableDescs.find { d =>
- Try(table = d.extract(plan, spark).get).isSuccess
+ Try { table = d.extract(plan, spark).get }.isSuccess
}
withClue(str) {
assertEqualsIgnoreCase(Some(catalogV2))(table.catalog)
@@ -104,7 +104,7 @@ class V2JdbcTableCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite
val spec = TABLE_COMMAND_SPECS(plan.getClass.getName)
var table: Table = null
spec.tableDescs.find { d =>
- Try(table = d.extract(plan, spark).get).isSuccess
+ Try { table = d.extract(plan, spark).get }.isSuccess
}
withClue(sql1) {
assertEqualsIgnoreCase(Some(catalogV2))(table.catalog)
@@ -127,7 +127,7 @@ class V2JdbcTableCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite
val plan = executePlan(sql1).analyzed
val spec = TABLE_COMMAND_SPECS(plan.getClass.getName)
var table: Table = null
- spec.tableDescs.find { d => Try(table = d.extract(plan, spark).get).isSuccess }
+ spec.tableDescs.find { d => Try { table = d.extract(plan, spark).get }.isSuccess }
withClue(sql1) {
assertEqualsIgnoreCase(Some(catalogV2))(table.catalog)
assertEqualsIgnoreCase(Some(ns1))(table.database)
@@ -146,7 +146,7 @@ class V2JdbcTableCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite
val spec = DB_COMMAND_SPECS(plan.getClass.getName)
var db: Database = null
spec.databaseDescs.find { d =>
- Try(db = d.extract(plan)).isSuccess
+ Try { db = d.extract(plan) }.isSuccess
}
withClue(sql) {
assertEqualsIgnoreCase(Some(catalogV2))(db.catalog)
@@ -165,7 +165,7 @@ class V2JdbcTableCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite
val spec = DB_COMMAND_SPECS(plan.getClass.getName)
var db: Database = null
spec.databaseDescs.find { d =>
- Try(db = d.extract(plan)).isSuccess
+ Try { db = d.extract(plan) }.isSuccess
}
withClue(sql1) {
assertEqualsIgnoreCase(Some(catalogV2))(db.catalog)