This is an automated email from the ASF dual-hosted git repository.
liuxun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/submarine.git
The following commit(s) were added to refs/heads/master by this push:
new db27dbf SUBMARINE-415. Show databases command to allow exposing
filtered databases
db27dbf is described below
commit db27dbf8ba238748155b3d970de11df5b5a97ae3
Author: Kent Yao <[email protected]>
AuthorDate: Tue Mar 10 00:54:34 2020 +0800
SUBMARINE-415. Show databases command to allow exposing filtered databases
### What is this PR for?
Add a new command to replace Spark's own ShowTablesCommand so that tables the
user is not allowed to see are filtered out.
### What type of PR is it?
Improvement
### Todos
* [ ] - Row-level filtering
* [ ] - Data-masking filtering
* [ ] - Configuration Restriction
### What is the Jira issue?
* https://issues.apache.org/jira/browse/SUBMARINE-415
### How should this be tested?
unit tests added
### Screenshots (if appropriate)
### Questions:
* Do the license files need an update? No
* Are there breaking changes for older versions? No
* Does this need documentation? Yes, will do after finishing all
Author: Kent Yao <[email protected]>
Closes #216 from yaooqinn/SUBMARINE-415 and squashes the following commits:
c3837ea [Kent Yao] SUBMARINE-415. Custom show tables command to allow
exposing filtered tables
---
...bmarineSparkRangerAuthorizationExtension.scala} | 12 +++--
.../sql/execution/SubmarineShowTablesCommand.scala | 38 ++++++++++++++
.../spark/security/RangerSparkSQLExtension.scala | 4 +-
.../apache/spark/sql/RangerSparkTestUtils.scala | 4 +-
...rineSparkRangerAuthorizationExtensionTest.scala | 59 ++++++++++++++++++++++
.../spark/security/SparkRangerAuthorizerTest.scala | 22 ++++++++
6 files changed, 130 insertions(+), 9 deletions(-)
diff --git
a/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/catalyst/optimizer/RangerSparkAuthorizerExtension.scala
b/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/catalyst/optimizer/SubmarineSparkRangerAuthorizationExtension.scala
similarity index 94%
rename from
submarine-security/spark-security/src/main/scala/org/apache/spark/sql/catalyst/optimizer/RangerSparkAuthorizerExtension.scala
rename to
submarine-security/spark-security/src/main/scala/org/apache/spark/sql/catalyst/optimizer/SubmarineSparkRangerAuthorizationExtension.scala
index 8fa110c..74cdc40 100644
---
a/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/catalyst/optimizer/RangerSparkAuthorizerExtension.scala
+++
b/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/catalyst/optimizer/SubmarineSparkRangerAuthorizationExtension.scala
@@ -26,18 +26,18 @@ import
org.apache.spark.sql.catalyst.plans.logical.{Command, LogicalPlan}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.command.{AlterDatabasePropertiesCommand,
AlterTableAddPartitionCommand, AlterTableDropPartitionCommand,
AlterTableRecoverPartitionsCommand, AlterTableRenameCommand,
AlterTableRenamePartitionCommand, AlterTableSerDePropertiesCommand,
AlterTableSetLocationCommand, AlterTableSetPropertiesCommand,
AlterTableUnsetPropertiesCommand, AlterViewAsCommand, AnalyzeColumnCommand,
AnalyzeTableCommand, CacheTableCommand, CreateDatabaseCommand,
CreateDataSourceTabl [...]
import org.apache.spark.sql.execution.datasources.{CreateTempViewUsing,
InsertIntoDataSourceCommand, InsertIntoHadoopFsRelationCommand}
-import org.apache.spark.sql.execution.SubmarineShowDatabasesCommand
+import org.apache.spark.sql.execution.{SubmarineShowDatabasesCommand,
SubmarineShowTablesCommand}
import org.apache.submarine.spark.security.{RangerSparkAuthorizer,
SparkAccessControlException}
/**
- * An Optimizer Rule to do Hive Authorization V2 for Spark SQL.
+ * An Optimizer Rule to do SQL standard ACL for Spark SQL.
*
- * For Apache Spark 2.2.x and later
+ * For Apache Spark 2.3.x and later
*/
-case class RangerSparkAuthorizerExtension(spark: SparkSession) extends
Rule[LogicalPlan] {
+case class SubmarineSparkRangerAuthorizationExtension(spark: SparkSession)
extends Rule[LogicalPlan] {
import org.apache.submarine.spark.security.SparkOperationType._
- private val LOG = LogFactory.getLog(classOf[RangerSparkAuthorizerExtension])
+ private val LOG =
LogFactory.getLog(classOf[SubmarineSparkRangerAuthorizationExtension])
/**
* Visit the [[LogicalPlan]] recursively to get all spark privilege objects,
check the privileges
@@ -51,6 +51,8 @@ case class RangerSparkAuthorizerExtension(spark:
SparkSession) extends Rule[Logi
plan match {
case s: ShowDatabasesCommand => SubmarineShowDatabasesCommand(s)
case s: SubmarineShowDatabasesCommand => s
+ case s: ShowTablesCommand => SubmarineShowTablesCommand(s)
+ case s: SubmarineShowTablesCommand => s
case _ =>
val operationType: SparkOperationType = toOperationType(plan)
val (in, out) = PrivilegesBuilder.build(plan)
diff --git
a/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/execution/SubmarineShowTablesCommand.scala
b/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/execution/SubmarineShowTablesCommand.scala
new file mode 100644
index 0000000..d9376e7
--- /dev/null
+++
b/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/execution/SubmarineShowTablesCommand.scala
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution
+
+import org.apache.spark.sql.{Row, SparkSession}
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.execution.command.{RunnableCommand,
ShowTablesCommand}
+import org.apache.submarine.spark.security.{RangerSparkAuthorizer,
SparkPrivilegeObject, SparkPrivilegeObjectType}
+
+case class SubmarineShowTablesCommand(child: ShowTablesCommand) extends
RunnableCommand {
+
+ override val output: Seq[Attribute] = child.output
+ override def run(sparkSession: SparkSession): Seq[Row] = {
+ val rows = child.run(sparkSession)
+ rows.filter(r =>
RangerSparkAuthorizer.isAllowed(toSparkPrivilegeObject(r)))
+ }
+
+ private def toSparkPrivilegeObject(row: Row): SparkPrivilegeObject = {
+ val database = row.getString(0)
+ val table = row.getString(1)
+ new SparkPrivilegeObject(SparkPrivilegeObjectType.TABLE_OR_VIEW, database,
table)
+ }
+}
diff --git
a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/RangerSparkSQLExtension.scala
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/RangerSparkSQLExtension.scala
index 7e71ad6..daf4d03 100644
---
a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/RangerSparkSQLExtension.scala
+++
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/RangerSparkSQLExtension.scala
@@ -18,10 +18,10 @@
package org.apache.submarine.spark.security
import org.apache.spark.sql.SparkSessionExtensions
-import org.apache.spark.sql.catalyst.optimizer.RangerSparkAuthorizerExtension
+import
org.apache.spark.sql.catalyst.optimizer.SubmarineSparkRangerAuthorizationExtension
class RangerSparkSQLExtension extends Extensions {
override def apply(ext: SparkSessionExtensions): Unit = {
- ext.injectOptimizerRule(RangerSparkAuthorizerExtension)
+ ext.injectOptimizerRule(SubmarineSparkRangerAuthorizationExtension)
}
}
diff --git
a/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/RangerSparkTestUtils.scala
b/submarine-security/spark-security/src/test/scala/org/apache/spark/sql/RangerSparkTestUtils.scala
similarity index 87%
rename from
submarine-security/spark-security/src/main/scala/org/apache/spark/sql/RangerSparkTestUtils.scala
rename to
submarine-security/spark-security/src/test/scala/org/apache/spark/sql/RangerSparkTestUtils.scala
index 60031c7..121afe3 100644
---
a/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/RangerSparkTestUtils.scala
+++
b/submarine-security/spark-security/src/test/scala/org/apache/spark/sql/RangerSparkTestUtils.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql
import java.security.PrivilegedExceptionAction
import org.apache.hadoop.security.UserGroupInformation
-import org.apache.spark.sql.catalyst.optimizer.RangerSparkAuthorizerExtension
+import
org.apache.spark.sql.catalyst.optimizer.SubmarineSparkRangerAuthorizationExtension
object RangerSparkTestUtils {
@@ -32,6 +32,6 @@ object RangerSparkTestUtils {
}
def enableAuthorizer(spark: SparkSession): Unit = {
- spark.extensions.injectOptimizerRule(RangerSparkAuthorizerExtension)
+
spark.extensions.injectOptimizerRule(SubmarineSparkRangerAuthorizationExtension)
}
}
diff --git
a/submarine-security/spark-security/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SubmarineSparkRangerAuthorizationExtensionTest.scala
b/submarine-security/spark-security/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SubmarineSparkRangerAuthorizationExtensionTest.scala
new file mode 100644
index 0000000..6c10283
--- /dev/null
+++
b/submarine-security/spark-security/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SubmarineSparkRangerAuthorizationExtensionTest.scala
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.optimizer
+
+import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.apache.spark.sql.execution.command.{CreateDatabaseCommand,
ShowDatabasesCommand, ShowTablesCommand}
+import org.apache.spark.sql.execution.{SubmarineShowDatabasesCommand,
SubmarineShowTablesCommand}
+import org.apache.spark.sql.hive.test.TestHive
+import org.apache.submarine.spark.security.SparkAccessControlException
+
+class SubmarineSparkRangerAuthorizationExtensionTest extends FunSuite with
BeforeAndAfterAll {
+
+ private val spark = TestHive.sparkSession.newSession()
+
+ private val authz = SubmarineSparkRangerAuthorizationExtension(spark)
+
+ test("replace submarine show databases") {
+ val df = spark.sql("show databases")
+ val originalPlan = df.queryExecution.optimizedPlan
+ assert(originalPlan.isInstanceOf[ShowDatabasesCommand])
+ val newPlan = authz(originalPlan)
+ assert(newPlan.isInstanceOf[SubmarineShowDatabasesCommand])
+ }
+
+ test("replace submarine show tables") {
+ val df = spark.sql("show tables")
+ val originalPlan = df.queryExecution.optimizedPlan
+ assert(originalPlan.isInstanceOf[ShowTablesCommand])
+ val newPlan = authz(originalPlan)
+ assert(newPlan.isInstanceOf[SubmarineShowTablesCommand])
+ }
+
+ test("fail to create database by default") {
+ try {
+ val df = spark.sql("create database testdb1")
+ val originalPlan = df.queryExecution.optimizedPlan
+ assert(originalPlan.isInstanceOf[CreateDatabaseCommand])
+ intercept[SparkAccessControlException](authz(originalPlan))
+ } finally {
+ spark.sql("drop database testdb1")
+ }
+ }
+
+}
diff --git
a/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/SparkRangerAuthorizerTest.scala
b/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/SparkRangerAuthorizerTest.scala
index 06fc247..e1373d1 100644
---
a/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/SparkRangerAuthorizerTest.scala
+++
b/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/SparkRangerAuthorizerTest.scala
@@ -75,9 +75,22 @@ class SparkRangerAuthorizerTest extends FunSuite with
BeforeAndAfterAll {
withUser("kent") {
assert(sql("show databases").count() === 2)
}
+
+ withUser("alice") {
+ assert(sql("show tables").count() === 7)
+ }
+ withUser("bob") {
+ assert(sql("show tables").count() === 7)
+ }
+
enableAuthorizer(spark)
}
+ override def afterAll(): Unit = {
+ super.afterAll()
+ spark.reset()
+ }
+
test("show databases") {
withUser("alice") {
assert(sql("show databases").count() === 0)
@@ -91,6 +104,15 @@ class SparkRangerAuthorizerTest extends FunSuite with
BeforeAndAfterAll {
}
}
+ test("show tables") {
+ withUser("alice") {
+ assert(sql("show tables").count() === 0)
+ }
+ withUser("bob") {
+ assert(sql("show tables").count() === 7)
+ }
+ }
+
test("use database") {
withUser("alice") {
val e = intercept[SparkAccessControlException](sql("use default"))
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]