This is an automated email from the ASF dual-hosted git repository.
bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 0a04f0839 [KYUUBI #5055] [Authz] Support building function privileges in Spark 3.4
0a04f0839 is described below
commit 0a04f083942d86c29b67e2dcbfc0fc0961a166dc
Author: Deng An <[email protected]>
AuthorDate: Tue Aug 8 15:15:23 2023 +0800
[KYUUBI #5055] [Authz] Support building function privileges in Spark 3.4
### _Why are the changes needed?_
Add support for building function privileges in Spark 3.4, as a follow-up to #4167.
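For reference, the core of the change is resolving a qualified function name into an optional catalog, an optional database, and a function name, so catalog-qualified names in Spark 3.4 plans can be turned into privilege objects. A minimal sketch of that resolution, mirroring the new `buildFunctionFromQualifiedName` helper in the diff below (the example names in the comments are hypothetical):

```scala
// Sketch only: mirrors the qualified-name parsing added in this patch.
case class Function(catalog: Option[String], database: Option[String], functionName: String)

def buildFunctionFromQualifiedName(qualifiedName: String): Function = {
  val parts = qualifiedName.split("\\.")
  if (parts.length == 3) {
    Function(Some(parts.head), Some(parts.tail.head), parts.last) // e.g. "spark_catalog.default.kyuubi_fun_1"
  } else if (parts.length == 2) {
    Function(None, Some(parts.head), parts.last) // e.g. "default.kyuubi_fun_1"
  } else {
    Function(None, None, qualifiedName) // e.g. "kyuubi_fun_1"
  }
}
```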
### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [x] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
Closes #5055 from packyan/PR_4167_follow_up_support_spark_3.4.
Closes #5055
46fe89e01 [Deng An] add support for function privileges building in 3.4
Authored-by: Deng An <[email protected]>
Signed-off-by: liangbowen <[email protected]>
---
.../plugin/spark/authz/PrivilegesBuilder.scala | 26 +++++++++++++++-------
.../kyuubi/plugin/spark/authz/serde/Function.scala | 4 ++--
.../spark/authz/serde/functionExtractors.scala | 24 +++++++++++---------
.../spark/authz/serde/functionTypeExtractors.scala | 6 ++---
.../authz/FunctionPrivilegesBuilderSuite.scala | 5 -----
5 files changed, 36 insertions(+), 29 deletions(-)
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
index b2fa89909..08dc49d49 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
@@ -220,15 +220,25 @@ object PrivilegesBuilder {
plan: LogicalPlan,
spark: SparkSession): PrivilegesAndOpType = {
val inputObjs = new ArrayBuffer[PrivilegeObject]
- // TODO: add support for Spark 3.4.x
- plan transformAllExpressions {
- case hiveFunction: Expression if isKnownFunction(hiveFunction) =>
- val functionSpec: ScanSpec = getFunctionSpec(hiveFunction)
- if (functionSpec.functionDescs.exists(!_.functionTypeDesc.get.skip(hiveFunction, spark))) {
- functionSpec.functions(hiveFunction).foreach(func =>
- inputObjs += PrivilegeObject(func))
+ plan match {
+ case command: Command if isKnownTableCommand(command) =>
+ val spec = getTableCommandSpec(command)
+ val functionPrivAndOpType = spec.queries(plan)
+ .map(plan => buildFunctions(plan, spark))
+ functionPrivAndOpType.map(_._1)
+ .reduce(_ ++ _)
+ .foreach(functionPriv => inputObjs += functionPriv)
+
+ case plan => plan transformAllExpressions {
+ case hiveFunction: Expression if isKnownFunction(hiveFunction) =>
+ val functionSpec: ScanSpec = getFunctionSpec(hiveFunction)
+ if (functionSpec.functionDescs
+ .exists(!_.functionTypeDesc.get.skip(hiveFunction, spark))) {
+ functionSpec.functions(hiveFunction).foreach(func =>
+ inputObjs += PrivilegeObject(func))
+ }
+ hiveFunction
}
- hiveFunction
}
(inputObjs, Seq.empty, OperationType.QUERY)
}
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Function.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Function.scala
index b7a0010b4..ba19972ed 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Function.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/Function.scala
@@ -21,8 +21,8 @@ package org.apache.kyuubi.plugin.spark.authz.serde
* :: Developer API ::
*
* Represents a function identity
- *
+ * @param catalog
* @param database
* @param functionName
*/
-case class Function(database: Option[String], functionName: String)
+case class Function(catalog: Option[String], database: Option[String], functionName: String)
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionExtractors.scala
index 729521200..bcd5f2665 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionExtractors.scala
@@ -20,7 +20,7 @@ package org.apache.kyuubi.plugin.spark.authz.serde
import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.expressions.ExpressionInfo
-import org.apache.kyuubi.plugin.spark.authz.serde.FunctionExtractor.buildFunctionIdentFromQualifiedName
+import org.apache.kyuubi.plugin.spark.authz.serde.FunctionExtractor.buildFunctionFromQualifiedName
trait FunctionExtractor extends (AnyRef => Function) with Extractor
@@ -29,13 +29,16 @@ object FunctionExtractor {
loadExtractorsToMap[FunctionExtractor]
}
- def buildFunctionIdentFromQualifiedName(qualifiedName: String): (String, Option[String]) = {
- val parts: Array[String] = qualifiedName.split("\\.", 2)
- if (parts.length == 1) {
- (qualifiedName, None)
+ private[authz] def buildFunctionFromQualifiedName(qualifiedName: String): Function = {
+ val parts: Array[String] = qualifiedName.split("\\.")
+ val (catalog, database, functionName) = if (parts.length == 3) {
+ (Some(parts.head), Some(parts.tail.head), parts.last)
+ } else if (parts.length == 2) {
+ (None, Some(parts.head), parts.last)
} else {
- (parts.last, Some(parts.head))
+ (None, None, qualifiedName)
}
+ Function(catalog, database, functionName)
}
}
@@ -44,7 +47,7 @@ object FunctionExtractor {
*/
class StringFunctionExtractor extends FunctionExtractor {
override def apply(v1: AnyRef): Function = {
- Function(None, v1.asInstanceOf[String])
+ Function(None, None, v1.asInstanceOf[String])
}
}
@@ -54,8 +57,7 @@ class StringFunctionExtractor extends FunctionExtractor {
class QualifiedNameStringFunctionExtractor extends FunctionExtractor {
override def apply(v1: AnyRef): Function = {
val qualifiedName: String = v1.asInstanceOf[String]
- val (funcName, database) = buildFunctionIdentFromQualifiedName(qualifiedName)
- Function(database, funcName)
+ buildFunctionFromQualifiedName(qualifiedName)
}
}
@@ -65,7 +67,7 @@ class QualifiedNameStringFunctionExtractor extends FunctionExtractor {
class FunctionIdentifierFunctionExtractor extends FunctionExtractor {
override def apply(v1: AnyRef): Function = {
val identifier = v1.asInstanceOf[FunctionIdentifier]
- Function(identifier.database, identifier.funcName)
+ Function(None, identifier.database, identifier.funcName)
}
}
@@ -75,6 +77,6 @@ class FunctionIdentifierFunctionExtractor extends FunctionExtractor {
class ExpressionInfoFunctionExtractor extends FunctionExtractor {
override def apply(v1: AnyRef): Function = {
val info = v1.asInstanceOf[ExpressionInfo]
- Function(Option(info.getDb), info.getName)
+ Function(None, Option(info.getDb), info.getName)
}
}
diff --git a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionTypeExtractors.scala b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionTypeExtractors.scala
index a2c5b427f..c134b5018 100644
--- a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionTypeExtractors.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/serde/functionTypeExtractors.scala
@@ -21,7 +21,7 @@ import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.catalog.SessionCatalog
-import org.apache.kyuubi.plugin.spark.authz.serde.FunctionExtractor.buildFunctionIdentFromQualifiedName
+import org.apache.kyuubi.plugin.spark.authz.serde.FunctionExtractor.buildFunctionFromQualifiedName
import org.apache.kyuubi.plugin.spark.authz.serde.FunctionType.{FunctionType, PERMANENT, SYSTEM, TEMP}
import org.apache.kyuubi.plugin.spark.authz.serde.FunctionTypeExtractor.getFunctionType
@@ -93,7 +93,7 @@ class FunctionNameFunctionTypeExtractor extends FunctionTypeExtractor {
override def apply(v1: AnyRef, spark: SparkSession): FunctionType = {
val catalog: SessionCatalog = spark.sessionState.catalog
val qualifiedName: String = v1.asInstanceOf[String]
- val (funcName, database) = buildFunctionIdentFromQualifiedName(qualifiedName)
- getFunctionType(FunctionIdentifier(funcName, database), catalog)
+ val function = buildFunctionFromQualifiedName(qualifiedName)
+ getFunctionType(FunctionIdentifier(function.functionName, function.database), catalog)
}
}
diff --git a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/FunctionPrivilegesBuilderSuite.scala b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/FunctionPrivilegesBuilderSuite.scala
index 7181a6760..ad4b57faa 100644
--- a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/FunctionPrivilegesBuilderSuite.scala
+++ b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/FunctionPrivilegesBuilderSuite.scala
@@ -24,7 +24,6 @@ import org.scalatest.funsuite.AnyFunSuite
import org.apache.kyuubi.plugin.spark.authz.OperationType.QUERY
import org.apache.kyuubi.plugin.spark.authz.ranger.AccessType
-import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils.SPARK_RUNTIME_VERSION
abstract class FunctionPrivilegesBuilderSuite extends AnyFunSuite
with SparkSessionProvider with BeforeAndAfterAll with BeforeAndAfterEach {
@@ -112,7 +111,6 @@ class HiveFunctionPrivilegesBuilderSuite extends FunctionPrivilegesBuilderSuite
override protected val catalogImpl: String = "hive"
test("Function Call Query") {
- assume(SPARK_RUNTIME_VERSION <= "3.3")
val plan = sql(s"SELECT kyuubi_fun_1('data'), " +
s"kyuubi_fun_2(value), " +
s"${reusedDb}.kyuubi_fun_0(value), " +
@@ -132,7 +130,6 @@ class HiveFunctionPrivilegesBuilderSuite extends FunctionPrivilegesBuilderSuite
}
test("Function Call Query with Quoted Name") {
- assume(SPARK_RUNTIME_VERSION <= "3.3")
val plan = sql(s"SELECT `kyuubi_fun_1`('data'), " +
s"`kyuubi_fun_2`(value), " +
s"`${reusedDb}`.`kyuubi_fun_0`(value), " +
@@ -152,7 +149,6 @@ class HiveFunctionPrivilegesBuilderSuite extends FunctionPrivilegesBuilderSuite
}
test("Simple Function Call Query") {
- assume(SPARK_RUNTIME_VERSION <= "3.3")
val plan = sql(s"SELECT kyuubi_fun_1('data'), " +
s"kyuubi_fun_0('value'), " +
s"${reusedDb}.kyuubi_fun_0('value'), " +
@@ -172,7 +168,6 @@ class HiveFunctionPrivilegesBuilderSuite extends FunctionPrivilegesBuilderSuite
}
test("Function Call In CAST Command") {
- assume(SPARK_RUNTIME_VERSION <= "3.3")
val table = "castTable"
withTable(table) { table =>
val plan = sql(s"CREATE TABLE ${table} " +