This is an automated email from the ASF dual-hosted git repository.

liuxun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/submarine.git


The following commit(s) were added to refs/heads/master by this push:
     new ef02ebe  SUBMARINE-484. DCL Framework: Basic antlr4 grammar support 
with create role as an example
ef02ebe is described below

commit ef02ebe6b4b20828ce2ad3972533175c1629a597
Author: Kent Yao <[email protected]>
AuthorDate: Mon Apr 27 19:11:11 2020 +0800

    SUBMARINE-484. DCL Framework: Basic antlr4 grammar support with create role 
as an example
    
    ### What is this PR for?
    
    Part of DCL(Data Control Language) Framework Support
    
    1. basic antlr4 parser for DCL SQLs
    2. create role syntax added as an example
    
    ### What type of PR is it?
    
    Feature
    
    ### Todos
    * [ ] - Task
    
    ### What is the Jira issue?
     https://issues.apache.org/jira/browse/SUBMARINE-484
    
    ### How should this be tested?
    new unit tests
    
    ### Screenshots (if appropriate)
    
    ### Questions:
    * Do the license files need updating? No
    * Is there breaking changes for older versions? No
    * Does this need documentation? No
    
    Author: Kent Yao <[email protected]>
    
    Closes #269 from yaooqinn/SUBMARINE-484 and squashes the following commits:
    
    4e80b5f [Kent Yao] nit
    00633ad [Kent Yao] SUBMARINE-484. DCL Framework: Basic antlr4 grammar 
support with create role as an example
---
 submarine-security/spark-security/pom.xml          |  46 +++++++++
 .../CreateRoleCommand.scala}                       |  20 +++-
 .../CreateRoleCommand.scala                        |  64 ++++++++++++
 .../spark/security/parser/SubmarineSqlBase.g4      | 105 ++++++++++++++++++++
 .../security/SparkAccessControlException.scala     |   7 +-
 .../security/api/RangerSparkDCLExtension.scala     |  62 ++++++++++++
 .../SubmarineSqlAstBuilder.scala}                  |  20 +++-
 .../spark/security/parser/SubmarineSqlParser.scala | 107 +++++++++++++++++++++
 .../security/parser/UpperCaseCharStream.scala      |  59 ++++++++++++
 .../security/parser/SubmarineSqlParserTest.scala   |  46 +++++++++
 10 files changed, 531 insertions(+), 5 deletions(-)

diff --git a/submarine-security/spark-security/pom.xml 
b/submarine-security/spark-security/pom.xml
index bc696f5..3df79ff 100644
--- a/submarine-security/spark-security/pom.xml
+++ b/submarine-security/spark-security/pom.xml
@@ -34,6 +34,7 @@
   <artifactId>submarine-spark-security</artifactId>
 
   <properties>
+    <antlr4.version>4.7</antlr4.version>
     <eclipse.jpa.version>2.5.2</eclipse.jpa.version>
     <gson.version>2.2.4</gson.version>
     
<httpcomponents.httpclient.version>4.5.3</httpcomponents.httpclient.version>
@@ -44,6 +45,7 @@
     <noggit.version>0.6</noggit.version>
     <ranger.spark.package>submarine_spark_ranger_project</ranger.spark.package>
     <ranger.version>1.1.0</ranger.version>
+    <ranger.major.version>1</ranger.major.version>
     <scala.version>2.11.8</scala.version>
     <scala.binary.version>2.11</scala.binary.version>
     <scalatest.version>2.2.6</scalatest.version>
@@ -235,6 +237,12 @@
       </exclusions>
     </dependency>
 
+    <dependency>
+      <groupId>org.antlr</groupId>
+      <artifactId>antlr4-runtime</artifactId>
+      <version>${antlr4.version}</version>
+    </dependency>
+
     <!-- unit tests-->
     <dependency>
       <groupId>org.scalatest</groupId>
@@ -289,6 +297,25 @@
     </testResources>
     <plugins>
       <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>ranger-${ranger.major.version}/src/main/scala</source>
+                <source>target/generated-sources/antlr4</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
         <groupId>net.alchim31.maven</groupId>
         <artifactId>scala-maven-plugin</artifactId>
         <version>3.2.2</version>
@@ -465,6 +492,24 @@
       </plugin>
 
       <plugin>
+        <groupId>org.antlr</groupId>
+        <artifactId>antlr4-maven-plugin</artifactId>
+        <version>${antlr4.version}</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>antlr4</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <visitor>true</visitor>
+          <sourceDirectory>./src/main/antlr4</sourceDirectory>
+          <treatWarningsAsErrors>true</treatWarningsAsErrors>
+        </configuration>
+      </plugin>
+
+      <plugin>
         <groupId>org.jacoco</groupId>
         <artifactId>jacoco-maven-plugin</artifactId>
         <version>0.8.0</version>
@@ -561,6 +606,7 @@
         
<httpcomponents.httpcore.version>4.4.6</httpcomponents.httpcore.version>
         
<httpcomponents.httpmime.version>4.5.3</httpcomponents.httpmime.version>
         <ranger.version>2.0.0</ranger.version>
+        <ranger.major.version>2</ranger.major.version>
         <solr.version>7.7.1</solr.version>
         <gethostname4j.scope>compile</gethostname4j.scope>
         <jna.scope>compile</jna.scope>
diff --git 
a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/SparkAccessControlException.scala
 
b/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
similarity index 58%
copy from 
submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/SparkAccessControlException.scala
copy to 
submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
index 66eb5f7..788166b 100644
--- 
a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/SparkAccessControlException.scala
+++ 
b/submarine-security/spark-security/ranger-1/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
@@ -17,6 +17,22 @@
  * under the License.
  */
 
-package org.apache.submarine.spark.security
+package org.apache.submarine.spark.security.command
 
-class SparkAccessControlException(msg: String) extends Exception(msg)
+import java.util.Arrays
+
+import scala.util.control.NonFatal
+
+import org.apache.hadoop.security.UserGroupInformation
+import org.apache.spark.sql.{Row, SparkSession}
+import org.apache.spark.sql.execution.command.RunnableCommand
+
+import org.apache.submarine.spark.security.{RangerSparkAuditHandler, 
RangerSparkPlugin, SparkAccessControlException}
+
+
+case class CreateRoleCommand(roleName: String) extends RunnableCommand {
+  import CreateRoleCommand._
+  override def run(sparkSession: SparkSession): Seq[Row] = {
+    Seq.empty[Row]
+  }
+}
diff --git 
a/submarine-security/spark-security/ranger-2/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
 
b/submarine-security/spark-security/ranger-2/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
new file mode 100644
index 0000000..59e55f3
--- /dev/null
+++ 
b/submarine-security/spark-security/ranger-2/src/main/scala/org.apache.submarine.spark.security.command/CreateRoleCommand.scala
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.submarine.spark.security.command
+
+import java.util.Arrays
+
+import scala.util.control.NonFatal
+
+import org.apache.hadoop.security.UserGroupInformation
+import org.apache.ranger.plugin.model.RangerRole
+import org.apache.spark.sql.{Row, SparkSession}
+import org.apache.spark.sql.execution.command.RunnableCommand
+
+import org.apache.submarine.spark.security.{RangerSparkAuditHandler, 
RangerSparkPlugin, SparkAccessControlException}
+
+
+case class CreateRoleCommand(roleName: String) extends RunnableCommand {
+  import CreateRoleCommand._
+  override def run(sparkSession: SparkSession): Seq[Row] = {
+
+    require(!RESERVED_ROLE_NAMES.contains(roleName),
+      s"Role name cannot be one of the reserved roles: 
${RESERVED_ROLE_NAMES.mkString(",")}")
+    val auditHandler = RangerSparkAuditHandler()
+    val currentUser = UserGroupInformation.getCurrentUser.getShortUserName
+
+    val role = new RangerRole()
+    role.setName(roleName)
+    role.setCreatedByUser(currentUser)
+    role.setCreatedBy(currentUser)
+    role.setUpdatedBy(currentUser)
+    val member = new RangerRole.RoleMember(currentUser, true)
+    role.setUsers(Arrays.asList(member))
+    try {
+      val res = RangerSparkPlugin.createRole(role, auditHandler)
+      logDebug(s"Create role: ${res.getName} success")
+      Seq.empty[Row]
+    } catch {
+      case NonFatal(e) => throw new SparkAccessControlException(e.getMessage, 
e)
+    } finally {
+      // TODO: support auditHandler.flushAudit()
+    }
+  }
+}
+
+object CreateRoleCommand {
+  final val RESERVED_ROLE_NAMES = Set("ALL", "DEFAULT", "NONE")
+}
diff --git 
a/submarine-security/spark-security/src/main/antlr4/org/apache/submarine/spark/security/parser/SubmarineSqlBase.g4
 
b/submarine-security/spark-security/src/main/antlr4/org/apache/submarine/spark/security/parser/SubmarineSqlBase.g4
new file mode 100644
index 0000000..ee44502
--- /dev/null
+++ 
b/submarine-security/spark-security/src/main/antlr4/org/apache/submarine/spark/security/parser/SubmarineSqlBase.g4
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * The idea and part of the original code is adopted from the Apache Spark project.
+ * We should obey the same Apache License 2.0 too.
+ */
+
+grammar SubmarineSqlBase;
+
+singleStatement
+    : statement EOF
+    ;
+
+statement
+    : CREATE ROLE identifier                                           
#createRole
+    ;
+
+identifier
+    : IDENTIFIER                                                       
#unquotedIdentifier
+    | quotedIdentifier                                                 
#quotedIdentifierAlternative
+    | nonReserved                                                      
#unquotedIdentifier
+    ;
+
+quotedIdentifier
+    : BACKQUOTED_IDENTIFIER
+    ;
+
+nonReserved
+    : ALL
+    | ALTER
+    | CREATE
+    | DELETE
+    | DROP
+    | INSERT
+    | PRIVILEGES
+    | READ
+    | ROLE
+    | SELECT
+    | UPDATE
+    | USE
+    | WRITE
+    ;
+
+//============================
+// Start of the keywords list
+//============================
+ALL: 'ALL';
+ALTER: 'ALTER';
+CREATE: 'CREATE';
+DELETE: 'DELETE';
+DROP: 'DROP';
+GRANT: 'GRANT';
+INSERT: 'INSERT';
+PRIVILEGES: 'PRIVILEGES';
+READ: 'READ';
+ROLE: 'ROLE';
+SELECT: 'SELECT';
+UPDATE: 'UPDATE';
+USE: 'USE';
+WRITE: 'WRITE';
+
+
+BACKQUOTED_IDENTIFIER
+    : '`' ( ~'`' | '``' )* '`'
+    ;
+
+IDENTIFIER
+    : (LETTER | DIGIT | '_')+
+    ;
+
+fragment DIGIT
+    : [0-9]
+    ;
+
+fragment LETTER
+    : [A-Z]
+    ;
+
+WS  : [ \r\n\t]+ -> channel(HIDDEN)
+    ;
+
+// Catch-all for anything we can't recognize.
+// We use this to be able to ignore and recover all the text
+// when splitting statements with DelimiterLexer
+UNRECOGNIZED
+    : .
+    ;
diff --git 
a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/SparkAccessControlException.scala
 
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/SparkAccessControlException.scala
index 66eb5f7..b5923d5 100644
--- 
a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/SparkAccessControlException.scala
+++ 
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/SparkAccessControlException.scala
@@ -19,4 +19,9 @@
 
 package org.apache.submarine.spark.security
 
-class SparkAccessControlException(msg: String) extends Exception(msg)
+class SparkAccessControlException(msg: String, e: Throwable) extends 
Exception(msg, e) {
+
+  def this(msg: String) = {
+    this(msg, null)
+  }
+}
diff --git 
a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/api/RangerSparkDCLExtension.scala
 
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/api/RangerSparkDCLExtension.scala
new file mode 100644
index 0000000..adec7c8
--- /dev/null
+++ 
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/api/RangerSparkDCLExtension.scala
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.submarine.spark.security.api
+
+import org.apache.spark.sql.SparkSessionExtensions
+
+import org.apache.submarine.spark.security.Extensions
+import org.apache.submarine.spark.security.parser.SubmarineSqlParser
+
+/**
+ * An extension for Spark SQL to activate DCL(Data Control Language)
+ *
+ * Scala example to create a `SparkSession` with the Submarine DCL parser::
+ * {{{
+ *    import org.apache.spark.sql.SparkSession
+ *
+ *    val spark = SparkSession
+ *       .builder()
+ *       .appName("...")
+ *       .master("...")
+ *       .config("spark.sql.extensions",
+ *         "org.apache.submarine.spark.security.api.RangerSparkDCLExtension")
+ *       .getOrCreate()
+ * }}}
+ *
+ * Java example to create a `SparkSession` with the Submarine DCL parser:
+ * {{{
+ *    import org.apache.spark.sql.SparkSession;
+ *
+ *    SparkSession spark = SparkSession
+ *                 .builder()
+ *                 .appName("...")
+ *                 .master("...")
+ *                 .config("spark.sql.extensions",
+ *                     
"org.apache.submarine.spark.security.api.RangerSparkDCLExtension")
+ *                 .getOrCreate();
+ * }}}
+ *
+ * @since 0.4.0
+ */
+class RangerSparkDCLExtension extends Extensions {
+  override def apply(ext: SparkSessionExtensions): Unit = {
+    ext.injectParser((_, parser) => new SubmarineSqlParser(parser))
+  }
+}
diff --git 
a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/SparkAccessControlException.scala
 
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/parser/SubmarineSqlAstBuilder.scala
similarity index 54%
copy from 
submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/SparkAccessControlException.scala
copy to 
submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/parser/SubmarineSqlAstBuilder.scala
index 66eb5f7..49217c5 100644
--- 
a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/SparkAccessControlException.scala
+++ 
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/parser/SubmarineSqlAstBuilder.scala
@@ -17,6 +17,22 @@
  * under the License.
  */
 
-package org.apache.submarine.spark.security
+package org.apache.submarine.spark.security.parser
 
-class SparkAccessControlException(msg: String) extends Exception(msg)
+import java.util.Locale
+
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+
+import org.apache.submarine.spark.security.command.CreateRoleCommand
+import 
org.apache.submarine.spark.security.parser.SubmarineSqlBaseParser.{CreateRoleContext,
 SingleStatementContext}
+
+class SubmarineSqlAstBuilder extends SubmarineSqlBaseBaseVisitor[AnyRef] {
+
+  override def visitSingleStatement(ctx: SingleStatementContext): LogicalPlan 
= {
+    visit(ctx.statement()).asInstanceOf[LogicalPlan]
+  }
+
+  override def visitCreateRole(ctx: CreateRoleContext): AnyRef = {
+    CreateRoleCommand(ctx.identifier().getText)
+  }
+}
diff --git 
a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/parser/SubmarineSqlParser.scala
 
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/parser/SubmarineSqlParser.scala
new file mode 100644
index 0000000..f1fd8d0
--- /dev/null
+++ 
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/parser/SubmarineSqlParser.scala
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.submarine.spark.security.parser
+
+import org.antlr.v4.runtime.{CharStreams, CommonTokenStream}
+import org.antlr.v4.runtime.atn.PredictionMode
+import org.antlr.v4.runtime.misc.ParseCancellationException
+import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
+import org.apache.spark.sql.catalyst.expressions.Expression
+import org.apache.spark.sql.catalyst.parser.{ParseErrorListener, 
ParseException, ParserInterface, PostProcessor}
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.trees.Origin
+import org.apache.spark.sql.types.{DataType, StructType}
+
+class SubmarineSqlParser(val delegate: ParserInterface) extends 
ParserInterface {
+
+  private val astBuilder = new SubmarineSqlAstBuilder
+
+  override def parsePlan(sqlText: String): LogicalPlan = parse(sqlText) { 
parser =>
+    astBuilder.visit(parser.singleStatement()) match {
+      case plan: LogicalPlan => plan
+      case _ => delegate.parsePlan(sqlText)
+    }
+  }
+
+  // scalastyle:off line.size.limit
+  /**
+   * Fork from 
`org.apache.spark.sql.catalyst.parser.AbstractSqlParser#parse(java.lang.String, 
scala.Function1)`.
+   *
+   * @see 
https://github.com/apache/spark/blob/v2.4.4/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala#L81
+   */
+  // scalastyle:on
+  private def parse[T](command: String)(toResult: SubmarineSqlBaseParser => 
T): T = {
+    val lexer = new SubmarineSqlBaseLexer(new 
UpperCaseCharStream(CharStreams.fromString(command)))
+    lexer.removeErrorListeners()
+    lexer.addErrorListener(ParseErrorListener)
+
+    val tokenStream = new CommonTokenStream(lexer)
+    val parser = new SubmarineSqlBaseParser(tokenStream)
+    parser.addParseListener(PostProcessor)
+    parser.removeErrorListeners()
+    parser.addErrorListener(ParseErrorListener)
+
+    try {
+      try {
+        // first, try parsing with potentially faster SLL mode
+        parser.getInterpreter.setPredictionMode(PredictionMode.SLL)
+        toResult(parser)
+      } catch {
+        case e: ParseCancellationException =>
+          // if we fail, parse with LL mode
+          tokenStream.seek(0) // rewind input stream
+          parser.reset()
+
+          // Try Again.
+          parser.getInterpreter.setPredictionMode(PredictionMode.LL)
+          toResult(parser)
+      }
+    } catch {
+      case e: ParseException if e.command.isDefined =>
+        throw e
+      case e: ParseException =>
+        throw e.withCommand(command)
+      case e: AnalysisException =>
+        val position = Origin(e.line, e.startPosition)
+        throw new ParseException(Option(command), e.message, position, 
position)
+    }
+  }
+
+  override def parseExpression(sqlText: String): Expression = {
+    delegate.parseExpression(sqlText)
+  }
+
+  override def parseTableIdentifier(sqlText: String): TableIdentifier = {
+    delegate.parseTableIdentifier(sqlText)
+  }
+
+  override def parseFunctionIdentifier(sqlText: String): FunctionIdentifier = {
+    delegate.parseFunctionIdentifier(sqlText)
+  }
+
+  override def parseTableSchema(sqlText: String): StructType = {
+    delegate.parseTableSchema(sqlText)
+  }
+
+  override def parseDataType(sqlText: String): DataType = {
+    delegate.parseDataType(sqlText)
+  }
+}
diff --git 
a/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/parser/UpperCaseCharStream.scala
 
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/parser/UpperCaseCharStream.scala
new file mode 100644
index 0000000..42424b9
--- /dev/null
+++ 
b/submarine-security/spark-security/src/main/scala/org/apache/submarine/spark/security/parser/UpperCaseCharStream.scala
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.submarine.spark.security.parser
+
+import org.antlr.v4.runtime.{CharStream, CodePointCharStream, IntStream}
+import org.antlr.v4.runtime.misc.Interval
+
+// scalastyle:off line.size.limit
+/**
+ * Adopted from Apache Spark project
+ * @see 
https://github.com/apache/spark/blob/v2.4.4/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala#L157
+ */
+// scalastyle:on line.size.limit
+private[parser] class UpperCaseCharStream(wrapped: CodePointCharStream) 
extends CharStream {
+  override def consume(): Unit = wrapped.consume()
+  override def getSourceName(): String = wrapped.getSourceName
+  override def index(): Int = wrapped.index
+  override def mark(): Int = wrapped.mark
+  override def release(marker: Int): Unit = wrapped.release(marker)
+  override def seek(where: Int): Unit = wrapped.seek(where)
+  override def size(): Int = wrapped.size
+
+  override def getText(interval: Interval): String = {
+    // ANTLR 4.7's CodePointCharStream implementations have bugs when
+    // getText() is called with an empty stream, or intervals where
+    // the start > end. See
+    // https://github.com/antlr/antlr4/commit/ac9f7530 for one fix
+    // that is not yet in a released ANTLR artifact.
+    if (size() > 0 && (interval.b - interval.a >= 0)) {
+      wrapped.getText(interval)
+    } else {
+      ""
+    }
+  }
+
+  // scalastyle:off
+  override def LA(i: Int): Int = {
+    val la = wrapped.LA(i)
+    if (la == 0 || la == IntStream.EOF) la else Character.toUpperCase(la)
+  }
+  // scalastyle:on
+}
diff --git 
a/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/parser/SubmarineSqlParserTest.scala
 
b/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/parser/SubmarineSqlParserTest.scala
new file mode 100644
index 0000000..549c0a8
--- /dev/null
+++ 
b/submarine-security/spark-security/src/test/scala/org/apache/submarine/spark/security/parser/SubmarineSqlParserTest.scala
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.submarine.spark.security.parser
+
+import org.apache.spark.sql.catalyst.parser.ParseException
+import org.apache.spark.sql.hive.test.TestHive
+import org.scalatest.FunSuite
+
+import org.apache.submarine.spark.security.command.CreateRoleCommand
+
+class SubmarineSqlParserTest extends FunSuite {
+
+  private val spark = TestHive.sparkSession.newSession()
+
+  test("create role") {
+    val parser = new SubmarineSqlParser(spark.sessionState.sqlParser)
+
+    val p1 = parser.parsePlan("create role abc")
+    assert(p1.isInstanceOf[CreateRoleCommand])
+    assert(p1.asInstanceOf[CreateRoleCommand].roleName === "abc")
+    val p2 = parser.parsePlan("create role admin")
+    assert(p2.isInstanceOf[CreateRoleCommand])
+    assert(p2.asInstanceOf[CreateRoleCommand].roleName === "admin")
+    val p3 = parser.parsePlan("create role `bob`")
+    assert(p3.isInstanceOf[CreateRoleCommand])
+    assert(p3.asInstanceOf[CreateRoleCommand].roleName === "`bob`")
+    intercept[ParseException](parser.parsePlan("create role 'bob'"))
+  }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to