This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new bc3fd3af1 [KYUUBI #3592] Spark SQL authz only consider persistent
functions
bc3fd3af1 is described below
commit bc3fd3af1cc6397db080438c270fe52d35c8ce9f
Author: Deng An <[email protected]>
AuthorDate: Thu Oct 20 20:26:06 2022 +0800
[KYUUBI #3592] Spark SQL authz only consider persistent functions
### _Why are the changes needed?_
to close #3592
### _How was this patch tested?_
- [x] Add some test cases that check the changes thoroughly including
negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [x] [Run
test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests)
locally before making a pull request
Closes #3593 from
packyan/branch-spark-sql-authz-only-check-permanent-functions.
Closes #3592
fb59e3d8 [Deng An] isPersistentFunction uses the current db when db name is
missing.
4e708976 [Deng An] use Option rather than Some, as info.getDb may return null.
5aefc66d [Deng An] fix failed unit test
ac38e0c2 [Deng An] generalizing isPersistentFunction method.
fcef751b [Deng An] Update
extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
532fd73d [Deng An] Update
extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
7877d916 [Deng An] optimize the code
c7005b36 [packyan] Spark SQL authz only consider permanent functions
Lead-authored-by: Deng An <[email protected]>
Co-authored-by: Deng An <[email protected]>
Co-authored-by: packyan <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
---
.../plugin/spark/authz/PrivilegesBuilder.scala | 37 ++++++++++++++++---
.../spark/authz/PrivilegesBuilderSuite.scala | 41 ++++++++++++++++++++++
2 files changed, 73 insertions(+), 5 deletions(-)
diff --git
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
index 89445d224..fe636e8d5 100644
---
a/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/main/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilder.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.{FunctionIdentifier,
TableIdentifier}
import org.apache.spark.sql.catalyst.analysis.{PersistedView, ViewType}
import org.apache.spark.sql.catalyst.catalog.CatalogTable
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
-import org.apache.spark.sql.catalyst.expressions.{Expression, NamedExpression}
+import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionInfo,
NamedExpression}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.connector.catalog.Identifier
import org.apache.spark.sql.execution.datasources.LogicalRelation
@@ -84,6 +84,16 @@ object PrivilegesBuilder {
spark.sessionState.catalog.isTempView(parts)
}
+ private def isPersistentFunction(
+ functionIdent: FunctionIdentifier,
+ spark: SparkSession): Boolean = {
+ val (database, funcName) = functionIdent.database match {
+ case Some(_) => (functionIdent.database, functionIdent.funcName)
+ case _ => (Some(spark.catalog.currentDatabase), functionIdent.funcName)
+ }
+
spark.sessionState.catalog.isPersistentFunction(FunctionIdentifier(funcName,
database))
+ }
+
/**
* Build PrivilegeObjects from Spark LogicalPlan
*
@@ -353,8 +363,15 @@ object PrivilegesBuilder {
buildQuery(getQuery, inputObjs)
case "CreateFunctionCommand" |
- "DropFunctionCommand" |
- "RefreshFunctionCommand" =>
+ "DropFunctionCommand" =>
+ val isTemp = getPlanField[Boolean]("isTemp")
+ if (!isTemp) {
+ val db = getPlanField[Option[String]]("databaseName")
+ val functionName = getPlanField[String]("functionName")
+ outputObjs += functionPrivileges(db.orNull, functionName)
+ }
+
+ case "RefreshFunctionCommand" =>
val db = getPlanField[Option[String]]("databaseName")
val functionName = getPlanField[String]("functionName")
outputObjs += functionPrivileges(db.orNull, functionName)
@@ -391,8 +408,18 @@ object PrivilegesBuilder {
inputObjs += databasePrivileges(quote(database))
case "DescribeFunctionCommand" =>
- val func = getPlanField[FunctionIdentifier]("functionName")
- inputObjs += functionPrivileges(func.database.orNull, func.funcName)
+ val (db: Option[String], funName: String) =
+ if (isSparkVersionAtLeast("3.3")) {
+ val info = getPlanField[ExpressionInfo]("info")
+ (Option(info.getDb), info.getName)
+ } else {
+ val funcIdent = getPlanField[FunctionIdentifier]("functionName")
+ (funcIdent.database, funcIdent.funcName)
+ }
+ val isPersistentFun = isPersistentFunction(FunctionIdentifier(funName,
db), spark)
+ if (isPersistentFun) {
+ inputObjs += functionPrivileges(db.orNull, funName)
+ }
case "DropTableCommand" =>
if (!isTempView(getPlanField[TableIdentifier]("tableName"), spark)) {
diff --git
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
index 0396b2293..b4c6dfb7b 100644
---
a/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
+++
b/extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/PrivilegesBuilderSuite.scala
@@ -547,6 +547,37 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
}
}
+ test("Create Temporary Function") {
+ val plan = sql("CREATE TEMPORARY FUNCTION CreateTempFunction AS" +
+ "'org.apache.hadoop.hive.ql.udf.generic.GenericUDFMaskHash'")
+ .queryExecution.analyzed
+ val operationType = OperationType(plan.nodeName)
+ assert(operationType === CREATEFUNCTION)
+ val tuple = PrivilegesBuilder.build(plan, spark)
+ assert(tuple._1.size === 0)
+ assert(tuple._2.size === 0)
+ }
+
+ test("Describe Temporary Function") {
+ val plan = sql("DESCRIBE FUNCTION CreateTempFunction")
+ .queryExecution.analyzed
+ val operationType = OperationType(plan.nodeName)
+ assert(operationType === DESCFUNCTION)
+ val tuple = PrivilegesBuilder.build(plan, spark)
+ assert(tuple._1.size === 0)
+ assert(tuple._2.size === 0)
+ }
+
+ test("Drop Temporary Function") {
+ val plan = sql("DROP TEMPORARY FUNCTION CreateTempFunction")
+ .queryExecution.analyzed
+ val operationType = OperationType(plan.nodeName)
+ assert(operationType === DROPFUNCTION)
+ val tuple = PrivilegesBuilder.build(plan, spark)
+ assert(tuple._1.size === 0)
+ assert(tuple._2.size === 0)
+ }
+
test("CreateFunctionCommand") {
val plan = sql("CREATE FUNCTION CreateFunctionCommand AS 'class_name'")
.queryExecution.analyzed
@@ -566,6 +597,16 @@ abstract class PrivilegesBuilderSuite extends AnyFunSuite
assert(accessType === AccessType.CREATE)
}
+ test("Describe Persistent Function") {
+ val plan = sql("DESCRIBE FUNCTION CreateFunctionCommand")
+ .queryExecution.analyzed
+ val operationType = OperationType(plan.nodeName)
+ assert(operationType === DESCFUNCTION)
+ val tuple = PrivilegesBuilder.build(plan, spark)
+ assert(tuple._1.size === 1)
+ assert(tuple._2.size === 0)
+ }
+
test("DropFunctionCommand") {
sql("CREATE FUNCTION DropFunctionCommand AS 'class_name'")
val plan = sql("DROP FUNCTION DropFunctionCommand")