This is an automated email from the ASF dual-hosted git repository.
liuxun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/submarine.git
The following commit(s) were added to refs/heads/master by this push:
new c3994e7 SUBMARINE-421. Support restrict spark configurations
c3994e7 is described below
commit c3994e75bc46f9d7cb923862e34135e07d81d756
Author: Kent Yao <[email protected]>
AuthorDate: Thu Mar 12 16:11:06 2020 +0800
SUBMARINE-421. Support restrict spark configurations
### What is this PR for?
Ban end-users from modifying security-related Spark settings.
### What type of PR is it?
Feature
### Todos
### What is the Jira issue?
* Open an issue on Jira https://issues.apache.org/jira/browse/SUBMARINE-421
* Put link here, and add [SUBMARINE-*Jira number*] in PR title, eg.
[SUBMARINE-23]
### How should this be tested?
add ut
### Screenshots (if appropriate)
### Questions:
* Do the license files need updating? No
* Is there breaking changes for older versions? No
* Does this need documentation? No
Author: Kent Yao <[email protected]>
Closes #225 from yaooqinn/SUBMARINE-421 and squashes the following commits:
e99330e [Kent Yao] add test
4888100 [Kent Yao] SUBMARINE-421. Support restrict spark configurations
---
.../SubmarineConfigurationCheckExtension.scala | 49 ++++++++++++++++++++
.../spark/security/RangerSparkSQLExtension.scala | 3 +-
.../SubmarineConfigurationCheckExtensionTest.scala | 54 ++++++++++++++++++++++
3 files changed, 105 insertions(+), 1 deletion(-)
diff --git
a/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/catalyst/optimizer/SubmarineConfigurationCheckExtension.scala
b/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/catalyst/optimizer/SubmarineConfigurationCheckExtension.scala
new file mode 100644
index 0000000..7bd6d89
--- /dev/null
+++
b/submarine-security/spark-security/src/main/scala/org/apache/spark/sql/catalyst/optimizer/SubmarineConfigurationCheckExtension.scala
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.sql.catalyst.optimizer
+
+import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.execution.command.SetCommand
+import org.apache.submarine.spark.security.SparkAccessControlException
+
+/**
+ * For banning end-users from set restricted spark configurations
+ */
/**
 * Checker rule that bans end-users from setting restricted Spark configurations.
 *
 * The banned set is the union of a fixed built-in list (the restricted-list key
 * itself, `spark.sql.runSQLOnFiles` and `spark.sql.extensions`) and the
 * comma-separated keys read once, at construction time, from
 * `spark.sql.submarine.conf.restricted.list`.
 */
case class SubmarineConfigurationCheckExtension(spark: SparkSession)
  extends (LogicalPlan => Unit) {

  final val RESTRICT_LIST_KEY = "spark.sql.submarine.conf.restricted.list"

  // Built-in banned keys plus the user-configured ones. Configured entries are
  // trimmed and blanks dropped so that a list written as "a, b" (or with a
  // trailing comma) still restricts the intended keys.
  private val bannedList: Seq[String] =
    RESTRICT_LIST_KEY ::
      "spark.sql.runSQLOnFiles" ::
      "spark.sql.extensions" ::
      spark.conf.getOption(RESTRICT_LIST_KEY)
        .map(_.split(',').map(_.trim).filter(_.nonEmpty).toList)
        .getOrElse(Nil)

  /**
   * Rejects SET commands that touch restricted keys.
   *
   * @param plan the logical plan to check
   * @throws SparkAccessControlException if the plan assigns a value to a banned
   *         key, or excludes Submarine rules via spark.sql.optimizer.excludedRules
   */
  override def apply(plan: LogicalPlan): Unit = plan match {
    // Excluding any Submarine optimizer rule would disable security enforcement.
    case SetCommand(Some(("spark.sql.optimizer.excludedRules", Some(v))))
        if v.contains("Submarine") =>
      throw new SparkAccessControlException("Excluding Submarine security rules is not allowed")
    // Only assignment (`set key=value`) is rejected; a bare `set key` query passes.
    case SetCommand(Some((k, Some(_)))) if bannedList.contains(k) =>
      throw new SparkAccessControlException(s"Modifying $k is not allowed")
    case _ => // all other plans are untouched
  }
}
diff --git
a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/RangerSparkSQLExtension.scala
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/RangerSparkSQLExtension.scala
index 3f29908..1dc1ad0 100644
---
a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/RangerSparkSQLExtension.scala
+++
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/RangerSparkSQLExtension.scala
@@ -20,11 +20,12 @@
package org.apache.submarine.spark.security
import org.apache.spark.sql.SparkSessionExtensions
-import org.apache.spark.sql.catalyst.optimizer.{SubmarineDataMaskingExtension,
SubmarineRowFilterExtension, SubmarineSparkRangerAuthorizationExtension}
+import
org.apache.spark.sql.catalyst.optimizer.{SubmarineConfigurationCheckExtension,
SubmarineDataMaskingExtension, SubmarineRowFilterExtension,
SubmarineSparkRangerAuthorizationExtension}
import org.apache.spark.sql.execution.SubmarineSparkPlanOmitStrategy
class RangerSparkSQLExtension extends Extensions {
override def apply(ext: SparkSessionExtensions): Unit = {
+ ext.injectCheckRule(SubmarineConfigurationCheckExtension)
ext.injectOptimizerRule(SubmarineSparkRangerAuthorizationExtension)
ext.injectOptimizerRule(SubmarineRowFilterExtension)
ext.injectOptimizerRule(SubmarineDataMaskingExtension)
diff --git
a/submarine-security/spark-security/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SubmarineConfigurationCheckExtensionTest.scala
b/submarine-security/spark-security/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SubmarineConfigurationCheckExtensionTest.scala
new file mode 100644
index 0000000..877c223
--- /dev/null
+++
b/submarine-security/spark-security/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SubmarineConfigurationCheckExtensionTest.scala
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.sql.catalyst.optimizer
+
+import org.apache.spark.sql.hive.test.TestHive
+import org.apache.submarine.spark.security.SparkAccessControlException
+import org.scalatest.{BeforeAndAfterAll, FunSuite}
+
class SubmarineConfigurationCheckExtensionTest extends FunSuite with BeforeAndAfterAll {

  private val spark = TestHive.sparkSession.newSession()

  override def afterAll(): Unit = {
    super.afterAll()
    spark.reset()
  }

  test("apply spark configuration restriction rules") {
    // Populate the user-defined restricted list before instantiating the rule,
    // since the rule snapshots the list at construction time.
    spark.sql("set spark.sql.submarine.conf.restricted.list=spark.sql.abc,spark.sql.xyz")
    val rule = SubmarineConfigurationCheckExtension(spark)

    // Optimized logical plan for a single SET statement.
    def planOf(statement: String) = spark.sql(statement).queryExecution.optimizedPlan

    // Built-in banned keys reject any assignment, whatever the value...
    intercept[SparkAccessControlException](rule(planOf("set spark.sql.runSQLOnFiles=true")))
    intercept[SparkAccessControlException](rule(planOf("set spark.sql.runSQLOnFiles=false")))
    // ...but merely querying such a key is allowed.
    rule(planOf("set spark.sql.runSQLOnFiles;"))

    // Keys named in the user-configured restricted list are banned as well.
    intercept[SparkAccessControlException](rule(planOf("set spark.sql.abc=xyz")))
    intercept[SparkAccessControlException](rule(planOf("set spark.sql.xyz=abc")))

    // The restricted-list key itself can never be overwritten.
    intercept[SparkAccessControlException](
      rule(planOf("set spark.sql.submarine.conf.restricted.list=123")))

    // Unrestricted keys pass through untouched.
    rule(planOf("set spark.sql.efg=hijk;"))
  }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]