This is an automated email from the ASF dual-hosted git repository.
liuxun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/submarine.git
The following commit(s) were added to refs/heads/master by this push:
new e20b448 SUBMARINE-486. DCL Framework: Drop Role Syntax
e20b448 is described below
commit e20b4487f6ceba4d2c52b7d49418a5e2075400ad
Author: Kent Yao <[email protected]>
AuthorDate: Wed Apr 29 11:38:04 2020 +0800
SUBMARINE-486. DCL Framework: Drop Role Syntax
### What is this PR for?
Part of the DCL Framework.
This PR adds DROP ROLE syntax support; a brief usage sketch follows.
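For illustration, a minimal usage sketch, assuming the Submarine security parser is already wired into the active SparkSession (that wiring is existing plumbing, not part of this patch; the session setup below is hypothetical):

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical session with the Submarine security parser already installed
// (that wiring is existing plumbing, not part of this patch).
val spark: SparkSession = ???

spark.sql("CREATE ROLE analyst")   // already supported by the DCL framework
spark.sql("DROP ROLE analyst")     // added here: ranger-2 delegates to
                                   // RangerSparkPlugin.dropRole; ranger-1 throws
                                   // UnsupportedOperationException
spark.sql("DROP ROLE default")     // ranger-2 rejects the reserved names
                                   // ALL / DEFAULT / NONE via CommandUtils
```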
### What type of PR is it?
feature
### Todos
* [ ] - Task
### What is the Jira issue?
https://issues.apache.org/jira/browse/SUBMARINE-486
### How should this be tested?
New parser tests were added, as sketched below.
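For reference, a condensed sketch of what the new parser test asserts (it mirrors the SubmarineSqlParserTest changes in the diff below; the TestHive session setup is taken from the existing test):

```scala
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.hive.test.TestHive
import org.apache.submarine.spark.security.command.DropRoleCommand
import org.apache.submarine.spark.security.parser.SubmarineSqlParser
import org.scalatest.Assertions._

val spark = TestHive.sparkSession.newSession()
val parser = new SubmarineSqlParser(spark.sessionState.sqlParser)

// "DROP ROLE <identifier>" now parses into a DropRoleCommand carrying the name.
val plan = parser.parsePlan("drop role admin")
assert(plan.isInstanceOf[DropRoleCommand])
assert(plan.asInstanceOf[DropRoleCommand].roleName == "admin")

// A quoted string literal is not a valid identifier, so parsing still fails.
intercept[ParseException](parser.parsePlan("drop role 'bob'"))
```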
### Screenshots (if appropriate)
### Questions:
* Do the license files need to be updated? No
* Are there breaking changes for older versions? No
* Does this need documentation? No
Author: Kent Yao <[email protected]>
Closes #272 from yaooqinn/SUBMARINE-486 and squashes the following commits:
5eb06aa [Kent Yao] SUBMARINE-486. DCL Framework: Drop Role Syntax
---
.../CreateRoleCommand.scala | 11 +----------
...reateRoleCommand.scala => DropRoleCommand.scala} | 13 ++-----------
.../CommandUtils.scala} | 21 ++++++++-------------
.../CreateRoleCommand.scala | 9 ++-------
.../DropRoleCommand.scala} | 19 +++++++++++++------
.../spark/security/parser/SubmarineSqlBase.g4 | 1 +
.../security/parser/SubmarineSqlAstBuilder.scala | 10 ++++++----
.../security/parser/SubmarineSqlParserTest.scala | 19 ++++++++++++++++---
8 files changed, 49 insertions(+), 54 deletions(-)
diff --git a/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala b/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
index 788166b..3efa931 100644
--- a/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
+++ b/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
@@ -19,20 +19,11 @@
package org.apache.submarine.spark.security.command
-import java.util.Arrays
-
-import scala.util.control.NonFatal
-
-import org.apache.hadoop.security.UserGroupInformation
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.execution.command.RunnableCommand
-import org.apache.submarine.spark.security.{RangerSparkAuditHandler, RangerSparkPlugin, SparkAccessControlException}
-
-
case class CreateRoleCommand(roleName: String) extends RunnableCommand {
- import CreateRoleCommand._
override def run(sparkSession: SparkSession): Seq[Row] = {
- Seq.empty[Row]
+ throw new UnsupportedOperationException("CREATE ROLE")
}
}
diff --git a/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala b/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/DropRoleCommand.scala
similarity index 74%
copy from submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
copy to submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/DropRoleCommand.scala
index 788166b..e8fc57c 100644
--- a/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
+++ b/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/DropRoleCommand.scala
@@ -19,20 +19,11 @@
package org.apache.submarine.spark.security.command
-import java.util.Arrays
-
-import scala.util.control.NonFatal
-
-import org.apache.hadoop.security.UserGroupInformation
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.execution.command.RunnableCommand
-import org.apache.submarine.spark.security.{RangerSparkAuditHandler, RangerSparkPlugin, SparkAccessControlException}
-
-
-case class CreateRoleCommand(roleName: String) extends RunnableCommand {
- import CreateRoleCommand._
+case class DropRoleCommand (roleName: String) extends RunnableCommand {
override def run(sparkSession: SparkSession): Seq[Row] = {
- Seq.empty[Row]
+ throw new UnsupportedOperationException("DROP ROLE")
}
}
diff --git a/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala b/submarine-security/spark-security/ranger-2/src/main/scala/org.apache.submarine.spark.security.command/CommandUtils.scala
similarity index 62%
copy from submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
copy to submarine-security/spark-security/ranger-2/src/main/scala/org.apache.submarine.spark.security.command/CommandUtils.scala
index 788166b..3657553 100644
--- a/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
+++ b/submarine-security/spark-security/ranger-2/src/main/scala/org.apache.submarine.spark.security.command/CommandUtils.scala
@@ -19,20 +19,15 @@
package org.apache.submarine.spark.security.command
-import java.util.Arrays
+private[command] object CommandUtils {
-import scala.util.control.NonFatal
+ final val RESERVED_ROLE_NAMES = Set("ALL", "DEFAULT", "NONE")
-import org.apache.hadoop.security.UserGroupInformation
-import org.apache.spark.sql.{Row, SparkSession}
-import org.apache.spark.sql.execution.command.RunnableCommand
-
-import org.apache.submarine.spark.security.{RangerSparkAuditHandler, RangerSparkPlugin, SparkAccessControlException}
-
-
-case class CreateRoleCommand(roleName: String) extends RunnableCommand {
- import CreateRoleCommand._
- override def run(sparkSession: SparkSession): Seq[Row] = {
- Seq.empty[Row]
+ def validateRoleName(roleName: String): Unit = {
+ if (RESERVED_ROLE_NAMES.exists(roleName.equalsIgnoreCase)) {
+ throw new IllegalArgumentException(s"Role name cannot be one of the reserved roles: " +
+ s"${RESERVED_ROLE_NAMES.mkString(",")}")
+ }
}
+
}
diff --git a/submarine-security/spark-security/ranger-2/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala b/submarine-security/spark-security/ranger-2/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
index 59e55f3..ae394b7 100644
--- a/submarine-security/spark-security/ranger-2/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
+++ b/submarine-security/spark-security/ranger-2/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
@@ -32,11 +32,10 @@ import org.apache.submarine.spark.security.{RangerSparkAuditHandler, RangerSpark
case class CreateRoleCommand(roleName: String) extends RunnableCommand {
- import CreateRoleCommand._
+ import CommandUtils._
override def run(sparkSession: SparkSession): Seq[Row] = {
- require(!RESERVED_ROLE_NAMES.contains(roleName),
- s"Role name cannot be one of the reserved roles:
${RESERVED_ROLE_NAMES.mkString(",")}")
+ validateRoleName(roleName)
val auditHandler = RangerSparkAuditHandler()
val currentUser = UserGroupInformation.getCurrentUser.getShortUserName
@@ -58,7 +57,3 @@ case class CreateRoleCommand(roleName: String) extends RunnableCommand {
}
}
}
-
-object CreateRoleCommand {
- final val RESERVED_ROLE_NAMES = Set("ALL", "DEFAULT", "NONE")
-}
diff --git a/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala b/submarine-security/spark-security/ranger-2/src/main/scala/org.apache.submarine.spark.security.command/DropRoleCommand.scala
similarity index 71%
copy from submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
copy to submarine-security/spark-security/ranger-2/src/main/scala/org.apache.submarine.spark.security.command/DropRoleCommand.scala
index 788166b..230bdb5 100644
--- a/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
+++ b/submarine-security/spark-security/ranger-2/src/main/scala/org.apache.submarine.spark.security.command/DropRoleCommand.scala
@@ -19,8 +19,6 @@
package org.apache.submarine.spark.security.command
-import java.util.Arrays
-
import scala.util.control.NonFatal
import org.apache.hadoop.security.UserGroupInformation
@@ -29,10 +27,19 @@ import org.apache.spark.sql.execution.command.RunnableCommand
import org.apache.submarine.spark.security.{RangerSparkAuditHandler, RangerSparkPlugin, SparkAccessControlException}
-
-case class CreateRoleCommand(roleName: String) extends RunnableCommand {
- import CreateRoleCommand._
+case class DropRoleCommand(roleName: String) extends RunnableCommand {
override def run(sparkSession: SparkSession): Seq[Row] = {
- Seq.empty[Row]
+ CommandUtils.validateRoleName(roleName)
+
+ try {
+ val auditHandler = RangerSparkAuditHandler()
+ val currentUser = UserGroupInformation.getCurrentUser.getShortUserName
+ RangerSparkPlugin.dropRole(currentUser, roleName, auditHandler)
+ Seq.empty[Row]
+ } catch {
+ case NonFatal(e) => throw new SparkAccessControlException(e.getMessage, e)
+ } finally {
+ // TODO: support auditHandler.flushAudit()
+ }
}
}
diff --git a/submarine-security/spark-security/src/main/antlr4/org/apache/submarine/spark/security/parser/SubmarineSqlBase.g4 b/submarine-security/spark-security/src/main/antlr4/org/apache/submarine/spark/security/parser/SubmarineSqlBase.g4
index ee44502..34f0d3d 100644
--- a/submarine-security/spark-security/src/main/antlr4/org/apache/submarine/spark/security/parser/SubmarineSqlBase.g4
+++ b/submarine-security/spark-security/src/main/antlr4/org/apache/submarine/spark/security/parser/SubmarineSqlBase.g4
@@ -30,6 +30,7 @@ singleStatement
statement
: CREATE ROLE identifier   #createRole
+ | DROP ROLE identifier   #dropRole
;
identifier
diff --git a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/parser/SubmarineSqlAstBuilder.scala b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/parser/SubmarineSqlAstBuilder.scala
index 49217c5..0b32db3 100644
--- a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/parser/SubmarineSqlAstBuilder.scala
+++ b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/parser/SubmarineSqlAstBuilder.scala
@@ -19,12 +19,10 @@
package org.apache.submarine.spark.security.parser
-import java.util.Locale
-
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.submarine.spark.security.command.CreateRoleCommand
-import org.apache.submarine.spark.security.parser.SubmarineSqlBaseParser.{CreateRoleContext, SingleStatementContext}
+import org.apache.submarine.spark.security.command.{CreateRoleCommand, DropRoleCommand}
+import org.apache.submarine.spark.security.parser.SubmarineSqlBaseParser.{CreateRoleContext, DropRoleContext, SingleStatementContext}
class SubmarineSqlAstBuilder extends SubmarineSqlBaseBaseVisitor[AnyRef] {
@@ -35,4 +33,8 @@ class SubmarineSqlAstBuilder extends SubmarineSqlBaseBaseVisitor[AnyRef] {
override def visitCreateRole(ctx: CreateRoleContext): AnyRef = {
CreateRoleCommand(ctx.identifier().getText)
}
+
+ override def visitDropRole(ctx: DropRoleContext): AnyRef = {
+ DropRoleCommand(ctx.identifier().getText)
+ }
}
diff --git a/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/parser/SubmarineSqlParserTest.scala b/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/parser/SubmarineSqlParserTest.scala
index 549c0a8..9adfa32 100644
--- a/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/parser/SubmarineSqlParserTest.scala
+++ b/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/parser/SubmarineSqlParserTest.scala
@@ -23,15 +23,15 @@ import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.hive.test.TestHive
import org.scalatest.FunSuite
-import org.apache.submarine.spark.security.command.CreateRoleCommand
+import org.apache.submarine.spark.security.command.{CreateRoleCommand, DropRoleCommand}
class SubmarineSqlParserTest extends FunSuite {
private val spark = TestHive.sparkSession.newSession()
- test("create role") {
- val parser = new SubmarineSqlParser(spark.sessionState.sqlParser)
+ val parser = new SubmarineSqlParser(spark.sessionState.sqlParser)
+ test("create role") {
val p1 = parser.parsePlan("create role abc")
assert(p1.isInstanceOf[CreateRoleCommand])
assert(p1.asInstanceOf[CreateRoleCommand].roleName === "abc")
@@ -43,4 +43,17 @@ class SubmarineSqlParserTest extends FunSuite {
assert(p3.asInstanceOf[CreateRoleCommand].roleName === "`bob`")
intercept[ParseException](parser.parsePlan("create role 'bob'"))
}
+
+ test("drop role") {
+ val p1 = parser.parsePlan("drop role abc")
+ assert(p1.isInstanceOf[DropRoleCommand])
+ assert(p1.asInstanceOf[DropRoleCommand].roleName === "abc")
+ val p2 = parser.parsePlan("drop role admin")
+ assert(p2.isInstanceOf[DropRoleCommand])
+ assert(p2.asInstanceOf[DropRoleCommand].roleName === "admin")
+ val p3 = parser.parsePlan("drop role `bob`")
+ assert(p3.isInstanceOf[DropRoleCommand])
+ assert(p3.asInstanceOf[DropRoleCommand].roleName === "`bob`")
+ intercept[ParseException](parser.parsePlan("drop role 'bob'"))
+ }
}