This is an automated email from the ASF dual-hosted git repository.
wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 0d9f8a107f57 [SPARK-48479][SQL] Support creating scalar and table SQL
UDFs in parser
0d9f8a107f57 is described below
commit 0d9f8a107f570a4bdeb69119f8f378406101ba23
Author: allisonwang-db <[email protected]>
AuthorDate: Thu Jun 20 10:07:30 2024 +0800
[SPARK-48479][SQL] Support creating scalar and table SQL UDFs in parser
### What changes were proposed in this pull request?
This PR adds support for creating user-defined SQL functions in parser.
Here is the SQL syntax:
```
CREATE [OR REPLACE] [TEMPORARY] FUNCTION [IF NOT EXISTS]
[db_name.]function_name
([param_name param_type [COMMENT param_comment], ...])
RETURNS {ret_type | TABLE (ret_name ret_type [COMMENT ret_comment], ...)}
[function_properties] function_body;
function_properties:
[NOT] DETERMINISTIC | COMMENT function_comment | [ CONTAINS SQL | READS
SQL DATA ]
function_body:
RETURN {expression | TABLE ( query )}
```
### Why are the changes needed?
To support SQL user-defined functions.
### Does this PR introduce _any_ user-facing change?
Yes. This PR adds parser support for creating user-defined SQL functions.
### How was this patch tested?
New unit tests.
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #46816 from allisonwang-db/spark-48479-sql-udf-parser.
Authored-by: allisonwang-db <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
---
docs/sql-ref-ansi-compliance.md | 14 ++
.../spark/sql/catalyst/parser/SqlBaseLexer.g4 | 14 ++
.../spark/sql/catalyst/parser/SqlBaseParser.g4 | 80 ++++++++++
.../spark/sql/errors/QueryParsingErrors.scala | 8 +-
.../sql/catalyst/catalog/RoutineLanguage.scala | 29 ++++
.../catalog/UserDefinedFunctionErrors.scala | 34 +++++
.../spark/sql/execution/SparkSqlParser.scala | 161 +++++++++++++++++++++
.../command/CreateSQLFunctionCommand.scala | 58 ++++++++
.../command/CreateUserDefinedFunctionCommand.scala | 77 ++++++++++
.../sql-tests/results/ansi/keywords.sql.out | 15 ++
.../resources/sql-tests/results/keywords.sql.out | 14 ++
.../sql/execution/command/DDLParserSuite.scala | 44 ++++++
.../ThriftServerWithSparkContextSuite.scala | 2 +-
13 files changed, 545 insertions(+), 5 deletions(-)
diff --git a/docs/sql-ref-ansi-compliance.md b/docs/sql-ref-ansi-compliance.md
index 40d7d7dd4003..920b3392854c 100644
--- a/docs/sql-ref-ansi-compliance.md
+++ b/docs/sql-ref-ansi-compliance.md
@@ -426,6 +426,7 @@ Below is a list of all the keywords in Spark SQL.
|BY|non-reserved|non-reserved|reserved|
|BYTE|non-reserved|non-reserved|non-reserved|
|CACHE|non-reserved|non-reserved|non-reserved|
+|CALLED|non-reserved|non-reserved|non-reserved|
|CASCADE|non-reserved|non-reserved|non-reserved|
|CASE|reserved|non-reserved|reserved|
|CAST|reserved|non-reserved|reserved|
@@ -452,6 +453,7 @@ Below is a list of all the keywords in Spark SQL.
|COMPUTE|non-reserved|non-reserved|non-reserved|
|CONCATENATE|non-reserved|non-reserved|non-reserved|
|CONSTRAINT|reserved|non-reserved|reserved|
+|CONTAINS|non-reserved|non-reserved|non-reserved|
|COST|non-reserved|non-reserved|non-reserved|
|CREATE|reserved|non-reserved|reserved|
|CROSS|reserved|strict-non-reserved|reserved|
@@ -478,10 +480,12 @@ Below is a list of all the keywords in Spark SQL.
|DECLARE|non-reserved|non-reserved|non-reserved|
|DEFAULT|non-reserved|non-reserved|non-reserved|
|DEFINED|non-reserved|non-reserved|non-reserved|
+|DEFINER|non-reserved|non-reserved|non-reserved|
|DELETE|non-reserved|non-reserved|reserved|
|DELIMITED|non-reserved|non-reserved|non-reserved|
|DESC|non-reserved|non-reserved|non-reserved|
|DESCRIBE|non-reserved|non-reserved|reserved|
+|DETERMINISTIC|non-reserved|non-reserved|reserved|
|DFS|non-reserved|non-reserved|non-reserved|
|DIRECTORIES|non-reserved|non-reserved|non-reserved|
|DIRECTORY|non-reserved|non-reserved|non-reserved|
@@ -540,6 +544,7 @@ Below is a list of all the keywords in Spark SQL.
|INDEXES|non-reserved|non-reserved|non-reserved|
|INNER|reserved|strict-non-reserved|reserved|
|INPATH|non-reserved|non-reserved|non-reserved|
+|INPUT|non-reserved|non-reserved|non-reserved|
|INPUTFORMAT|non-reserved|non-reserved|non-reserved|
|INSERT|non-reserved|non-reserved|reserved|
|INT|non-reserved|non-reserved|reserved|
@@ -547,10 +552,12 @@ Below is a list of all the keywords in Spark SQL.
|INTERSECT|reserved|strict-non-reserved|reserved|
|INTERVAL|non-reserved|non-reserved|reserved|
|INTO|reserved|non-reserved|reserved|
+|INVOKER|non-reserved|non-reserved|non-reserved|
|IS|reserved|non-reserved|reserved|
|ITEMS|non-reserved|non-reserved|non-reserved|
|JOIN|reserved|strict-non-reserved|reserved|
|KEYS|non-reserved|non-reserved|non-reserved|
+|LANGUAGE|non-reserved|non-reserved|reserved|
|LAST|non-reserved|non-reserved|non-reserved|
|LATERAL|reserved|strict-non-reserved|reserved|
|LAZY|non-reserved|non-reserved|non-reserved|
@@ -579,6 +586,7 @@ Below is a list of all the keywords in Spark SQL.
|MINUTE|non-reserved|non-reserved|non-reserved|
|MINUTES|non-reserved|non-reserved|non-reserved|
|MINUS|non-reserved|strict-non-reserved|non-reserved|
+|MODIFIES|non-reserved|non-reserved|non-reserved|
|MONTH|non-reserved|non-reserved|non-reserved|
|MONTHS|non-reserved|non-reserved|non-reserved|
|MSCK|non-reserved|non-reserved|non-reserved|
@@ -623,6 +631,7 @@ Below is a list of all the keywords in Spark SQL.
|QUARTER|non-reserved|non-reserved|non-reserved|
|QUERY|non-reserved|non-reserved|non-reserved|
|RANGE|non-reserved|non-reserved|reserved|
+|READS|non-reserved|non-reserved|non-reserved|
|REAL|non-reserved|non-reserved|reserved|
|RECORDREADER|non-reserved|non-reserved|non-reserved|
|RECORDWRITER|non-reserved|non-reserved|non-reserved|
@@ -638,6 +647,8 @@ Below is a list of all the keywords in Spark SQL.
|RESET|non-reserved|non-reserved|non-reserved|
|RESPECT|non-reserved|non-reserved|non-reserved|
|RESTRICT|non-reserved|non-reserved|non-reserved|
+|RETURN|non-reserved|non-reserved|reserved|
+|RETURNS|non-reserved|non-reserved|reserved|
|REVOKE|non-reserved|non-reserved|reserved|
|RIGHT|reserved|strict-non-reserved|reserved|
|RLIKE|non-reserved|non-reserved|non-reserved|
@@ -651,6 +662,7 @@ Below is a list of all the keywords in Spark SQL.
|SCHEMAS|non-reserved|non-reserved|non-reserved|
|SECOND|non-reserved|non-reserved|non-reserved|
|SECONDS|non-reserved|non-reserved|non-reserved|
+|SECURITY|non-reserved|non-reserved|non-reserved|
|SELECT|reserved|non-reserved|reserved|
|SEMI|non-reserved|strict-non-reserved|non-reserved|
|SEPARATED|non-reserved|non-reserved|non-reserved|
@@ -668,6 +680,8 @@ Below is a list of all the keywords in Spark SQL.
|SORT|non-reserved|non-reserved|non-reserved|
|SORTED|non-reserved|non-reserved|non-reserved|
|SOURCE|non-reserved|non-reserved|non-reserved|
+|SPECIFIC|non-reserved|non-reserved|reserved|
+|SQL|reserved|non-reserved|reserved|
|START|non-reserved|non-reserved|reserved|
|STATISTICS|non-reserved|non-reserved|non-reserved|
|STORED|non-reserved|non-reserved|non-reserved|
diff --git
a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
index 5753b153de30..85a4633e8050 100644
---
a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
+++
b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
@@ -146,6 +146,7 @@ BUCKETS: 'BUCKETS';
BY: 'BY';
BYTE: 'BYTE';
CACHE: 'CACHE';
+CALLED: 'CALLED';
CASCADE: 'CASCADE';
CASE: 'CASE';
CAST: 'CAST';
@@ -172,6 +173,7 @@ COMPENSATION: 'COMPENSATION';
COMPUTE: 'COMPUTE';
CONCATENATE: 'CONCATENATE';
CONSTRAINT: 'CONSTRAINT';
+CONTAINS: 'CONTAINS';
COST: 'COST';
CREATE: 'CREATE';
CROSS: 'CROSS';
@@ -198,10 +200,12 @@ DECIMAL: 'DECIMAL';
DECLARE: 'DECLARE';
DEFAULT: 'DEFAULT';
DEFINED: 'DEFINED';
+DEFINER: 'DEFINER';
DELETE: 'DELETE';
DELIMITED: 'DELIMITED';
DESC: 'DESC';
DESCRIBE: 'DESCRIBE';
+DETERMINISTIC: 'DETERMINISTIC';
DFS: 'DFS';
DIRECTORIES: 'DIRECTORIES';
DIRECTORY: 'DIRECTORY';
@@ -260,6 +264,7 @@ INDEX: 'INDEX';
INDEXES: 'INDEXES';
INNER: 'INNER';
INPATH: 'INPATH';
+INPUT: 'INPUT';
INPUTFORMAT: 'INPUTFORMAT';
INSERT: 'INSERT';
INTERSECT: 'INTERSECT';
@@ -267,10 +272,12 @@ INTERVAL: 'INTERVAL';
INT: 'INT';
INTEGER: 'INTEGER';
INTO: 'INTO';
+INVOKER: 'INVOKER';
IS: 'IS';
ITEMS: 'ITEMS';
JOIN: 'JOIN';
KEYS: 'KEYS';
+LANGUAGE: 'LANGUAGE';
LAST: 'LAST';
LATERAL: 'LATERAL';
LAZY: 'LAZY';
@@ -298,6 +305,7 @@ MILLISECOND: 'MILLISECOND';
MILLISECONDS: 'MILLISECONDS';
MINUTE: 'MINUTE';
MINUTES: 'MINUTES';
+MODIFIES: 'MODIFIES';
MONTH: 'MONTH';
MONTHS: 'MONTHS';
MSCK: 'MSCK';
@@ -342,6 +350,7 @@ PURGE: 'PURGE';
QUARTER: 'QUARTER';
QUERY: 'QUERY';
RANGE: 'RANGE';
+READS: 'READS';
REAL: 'REAL';
RECORDREADER: 'RECORDREADER';
RECORDWRITER: 'RECORDWRITER';
@@ -356,6 +365,8 @@ REPLACE: 'REPLACE';
RESET: 'RESET';
RESPECT: 'RESPECT';
RESTRICT: 'RESTRICT';
+RETURN: 'RETURN';
+RETURNS: 'RETURNS';
REVOKE: 'REVOKE';
RIGHT: 'RIGHT';
RLIKE: 'RLIKE' | 'REGEXP';
@@ -369,6 +380,7 @@ SECOND: 'SECOND';
SECONDS: 'SECONDS';
SCHEMA: 'SCHEMA';
SCHEMAS: 'SCHEMAS';
+SECURITY: 'SECURITY';
SELECT: 'SELECT';
SEMI: 'SEMI';
SEPARATED: 'SEPARATED';
@@ -387,6 +399,8 @@ SOME: 'SOME';
SORT: 'SORT';
SORTED: 'SORTED';
SOURCE: 'SOURCE';
+SPECIFIC: 'SPECIFIC';
+SQL: 'SQL';
START: 'START';
STATISTICS: 'STATISTICS';
STORED: 'STORED';
diff --git
a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
index 37671fc735c3..7501283a4ac3 100644
---
a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
+++
b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
@@ -190,6 +190,11 @@ statement
| CREATE (OR REPLACE)? TEMPORARY? FUNCTION (IF errorCapturingNot EXISTS)?
identifierReference AS className=stringLit
(USING resource (COMMA resource)*)?
#createFunction
+ | CREATE (OR REPLACE)? TEMPORARY? FUNCTION (IF errorCapturingNot EXISTS)?
+ identifierReference LEFT_PAREN parameters=colDefinitionList?
RIGHT_PAREN
+ (RETURNS (dataType | TABLE LEFT_PAREN returnParams=colTypeList
RIGHT_PAREN))?
+ routineCharacteristics
+ RETURN (query | expression)
#createUserDefinedFunction
| DROP TEMPORARY? FUNCTION (IF EXISTS)? identifierReference
#dropFunction
| DECLARE (OR REPLACE)? VARIABLE?
identifierReference dataType? variableDefaultExpression?
#createVariable
@@ -1216,6 +1221,14 @@ createOrReplaceTableColType
: colName=errorCapturingIdentifier dataType colDefinitionOption*
;
+colDefinitionList
+ : colDefinition (COMMA colDefinition)*
+ ;
+
+colDefinition
+ : colName=errorCapturingIdentifier dataType colDefinitionOption*
+ ;
+
colDefinitionOption
: errorCapturingNot NULL
| defaultExpression
@@ -1235,6 +1248,46 @@ complexColType
: errorCapturingIdentifier COLON? dataType (errorCapturingNot NULL)?
commentSpec?
;
+routineCharacteristics
+ : (routineLanguage
+ | specificName
+ | deterministic
+ | sqlDataAccess
+ | nullCall
+ | commentSpec
+ | rightsClause)*
+ ;
+
+routineLanguage
+ : LANGUAGE (SQL | IDENTIFIER)
+ ;
+
+specificName
+ : SPECIFIC specific=errorCapturingIdentifier
+ ;
+
+deterministic
+ : DETERMINISTIC
+ | errorCapturingNot DETERMINISTIC
+ ;
+
+sqlDataAccess
+ : access=NO SQL
+ | access=CONTAINS SQL
+ | access=READS SQL DATA
+ | access=MODIFIES SQL DATA
+ ;
+
+nullCall
+ : RETURNS NULL ON NULL INPUT
+ | CALLED ON NULL INPUT
+ ;
+
+rightsClause
+ : SQL SECURITY INVOKER
+ | SQL SECURITY DEFINER
+ ;
+
whenClause
: WHEN condition=expression THEN result=expression
;
@@ -1394,6 +1447,7 @@ ansiNonReserved
| BY
| BYTE
| CACHE
+ | CALLED
| CASCADE
| CATALOG
| CATALOGS
@@ -1413,6 +1467,7 @@ ansiNonReserved
| COMPENSATION
| COMPUTE
| CONCATENATE
+ | CONTAINS
| COST
| CUBE
| CURRENT
@@ -1433,10 +1488,12 @@ ansiNonReserved
| DECLARE
| DEFAULT
| DEFINED
+ | DEFINER
| DELETE
| DELIMITED
| DESC
| DESCRIBE
+ | DETERMINISTIC
| DFS
| DIRECTORIES
| DIRECTORY
@@ -1477,13 +1534,16 @@ ansiNonReserved
| INDEX
| INDEXES
| INPATH
+ | INPUT
| INPUTFORMAT
| INSERT
| INT
| INTEGER
| INTERVAL
+ | INVOKER
| ITEMS
| KEYS
+ | LANGUAGE
| LAST
| LAZY
| LIKE
@@ -1508,6 +1568,7 @@ ansiNonReserved
| MILLISECONDS
| MINUTE
| MINUTES
+ | MODIFIES
| MONTH
| MONTHS
| MSCK
@@ -1541,6 +1602,7 @@ ansiNonReserved
| QUARTER
| QUERY
| RANGE
+ | READS
| REAL
| RECORDREADER
| RECORDWRITER
@@ -1554,6 +1616,8 @@ ansiNonReserved
| RESET
| RESPECT
| RESTRICT
+ | RETURN
+ | RETURNS
| REVOKE
| RLIKE
| ROLE
@@ -1566,6 +1630,7 @@ ansiNonReserved
| SCHEMAS
| SECOND
| SECONDS
+ | SECURITY
| SEMI
| SEPARATED
| SERDE
@@ -1581,6 +1646,7 @@ ansiNonReserved
| SORT
| SORTED
| SOURCE
+ | SPECIFIC
| START
| STATISTICS
| STORED
@@ -1698,6 +1764,7 @@ nonReserved
| BY
| BYTE
| CACHE
+ | CALLED
| CASCADE
| CASE
| CAST
@@ -1724,6 +1791,7 @@ nonReserved
| COMPUTE
| CONCATENATE
| CONSTRAINT
+ | CONTAINS
| COST
| CREATE
| CUBE
@@ -1749,10 +1817,12 @@ nonReserved
| DECLARE
| DEFAULT
| DEFINED
+ | DEFINER
| DELETE
| DELIMITED
| DESC
| DESCRIBE
+ | DETERMINISTIC
| DFS
| DIRECTORIES
| DIRECTORY
@@ -1808,15 +1878,18 @@ nonReserved
| INDEX
| INDEXES
| INPATH
+ | INPUT
| INPUTFORMAT
| INSERT
| INT
| INTEGER
| INTERVAL
| INTO
+ | INVOKER
| IS
| ITEMS
| KEYS
+ | LANGUAGE
| LAST
| LAZY
| LEADING
@@ -1843,6 +1916,7 @@ nonReserved
| MILLISECONDS
| MINUTE
| MINUTES
+ | MODIFIES
| MONTH
| MONTHS
| MSCK
@@ -1885,6 +1959,7 @@ nonReserved
| QUARTER
| QUERY
| RANGE
+ | READS
| REAL
| RECORDREADER
| RECORDWRITER
@@ -1899,6 +1974,8 @@ nonReserved
| RESET
| RESPECT
| RESTRICT
+ | RETURN
+ | RETURNS
| REVOKE
| RLIKE
| ROLE
@@ -1911,6 +1988,7 @@ nonReserved
| SCHEMAS
| SECOND
| SECONDS
+ | SECURITY
| SELECT
| SEPARATED
| SERDE
@@ -1927,6 +2005,8 @@ nonReserved
| SORT
| SORTED
| SOURCE
+ | SPECIFIC
+ | SQL
| START
| STATISTICS
| STORED
diff --git
a/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
b/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index 816fa546a138..e7ae9f2bfb7b 100644
---
a/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++
b/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -574,19 +574,19 @@ private[sql] object QueryParsingErrors extends
DataTypeErrorsBase {
ctx)
}
- def createFuncWithBothIfNotExistsAndReplaceError(ctx:
CreateFunctionContext): Throwable = {
+ def createFuncWithBothIfNotExistsAndReplaceError(ctx: ParserRuleContext):
Throwable = {
new ParseException(
errorClass =
"INVALID_SQL_SYNTAX.CREATE_ROUTINE_WITH_IF_NOT_EXISTS_AND_REPLACE",
ctx)
}
- def defineTempFuncWithIfNotExistsError(ctx: CreateFunctionContext):
Throwable = {
+ def defineTempFuncWithIfNotExistsError(ctx: ParserRuleContext): Throwable = {
new ParseException(
errorClass = "INVALID_SQL_SYNTAX.CREATE_TEMP_FUNC_WITH_IF_NOT_EXISTS",
ctx)
}
- def unsupportedFunctionNameError(funcName: Seq[String], ctx:
CreateFunctionContext): Throwable = {
+ def unsupportedFunctionNameError(funcName: Seq[String], ctx:
ParserRuleContext): Throwable = {
new ParseException(
errorClass = "INVALID_SQL_SYNTAX.MULTI_PART_NAME",
messageParameters = Map(
@@ -597,7 +597,7 @@ private[sql] object QueryParsingErrors extends
DataTypeErrorsBase {
def specifyingDBInCreateTempFuncError(
databaseName: String,
- ctx: CreateFunctionContext): Throwable = {
+ ctx: ParserRuleContext): Throwable = {
new ParseException(
errorClass = "INVALID_SQL_SYNTAX.CREATE_TEMP_FUNC_WITH_DATABASE",
messageParameters = Map("database" -> toSQLId(databaseName)),
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/RoutineLanguage.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/RoutineLanguage.scala
new file mode 100644
index 000000000000..fc02bf0c606d
--- /dev/null
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/RoutineLanguage.scala
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.catalog
+
+/**
+ * Supported routine languages for UDFs created via SQL.
+ */
+sealed trait RoutineLanguage {
+ def name: String
+}
+
+case object LanguageSQL extends RoutineLanguage {
+ override def name: String = "SQL"
+}
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/UserDefinedFunctionErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/UserDefinedFunctionErrors.scala
new file mode 100644
index 000000000000..a5381669caea
--- /dev/null
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/UserDefinedFunctionErrors.scala
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.catalog
+
+import org.apache.spark.SparkException
+import org.apache.spark.sql.errors.QueryErrorsBase
+
+/**
+ * Errors during registering and executing [[UserDefinedFunction]]s.
+ */
+object UserDefinedFunctionErrors extends QueryErrorsBase {
+ def unsupportedUserDefinedFunction(language: RoutineLanguage): Throwable = {
+ unsupportedUserDefinedFunction(language.name)
+ }
+
+ def unsupportedUserDefinedFunction(language: String): Throwable = {
+ SparkException.internalError(s"Unsupported user defined function type:
$language")
+ }
+}
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 2caa8c074f64..a5b9aebefc43 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -653,6 +653,167 @@ class SparkSqlAstBuilder extends AstBuilder {
})
}
+ /**
+ * Create a [[CreateUserDefinedFunctionCommand]].
+ *
+ * For example:
+ * {{{
+ * CREATE [OR REPLACE] [TEMPORARY] FUNCTION [IF NOT EXISTS]
[db_name.]function_name
+ * ([param_name param_type [COMMENT param_comment], ...])
+ * RETURNS {ret_type | TABLE (ret_name ret_type [COMMENT ret_comment],
...])}
+ * [routine_characteristics]
+ * RETURN {expression | TABLE ( query )};
+ * }}}
+ */
+ override def visitCreateUserDefinedFunction(ctx:
CreateUserDefinedFunctionContext): LogicalPlan =
+ withOrigin(ctx) {
+ assert(ctx.expression != null || ctx.query != null)
+
+ if (ctx.EXISTS != null && ctx.REPLACE != null) {
+ throw
QueryParsingErrors.createFuncWithBothIfNotExistsAndReplaceError(ctx)
+ }
+
+ val inputParamText = Option(ctx.parameters).map(source)
+ val returnTypeText: String =
+ if (ctx.RETURNS != null &&
+ (Option(ctx.dataType).nonEmpty ||
Option(ctx.returnParams).nonEmpty)) {
+ source(Option(ctx.dataType).getOrElse(ctx.returnParams))
+ } else {
+ ""
+ }
+ val exprText = Option(ctx.expression()).map(source)
+ val queryText = Option(ctx.query()).map(source)
+
+ val (containsSQL, deterministic, comment, optionalLanguage) =
+ visitRoutineCharacteristics(ctx.routineCharacteristics())
+ val language: RoutineLanguage = optionalLanguage.getOrElse(LanguageSQL)
+ val isTableFunc = ctx.TABLE() != null ||
returnTypeText.equalsIgnoreCase("table")
+
+ withIdentClause(ctx.identifierReference(), functionIdentifier => {
+ if (ctx.TEMPORARY == null) {
+ // TODO: support creating persistent UDFs.
+ operationNotAllowed(s"creating persistent SQL functions is not
supported", ctx)
+ } else {
+ // Disallow to define a temporary function with `IF NOT EXISTS`
+ if (ctx.EXISTS != null) {
+ throw QueryParsingErrors.defineTempFuncWithIfNotExistsError(ctx)
+ }
+
+ if (functionIdentifier.length > 2) {
+ throw
QueryParsingErrors.unsupportedFunctionNameError(functionIdentifier, ctx)
+ } else if (functionIdentifier.length == 2) {
+ // Temporary function names should not contain database prefix
like "database.function"
+ throw
QueryParsingErrors.specifyingDBInCreateTempFuncError(functionIdentifier.head,
ctx)
+ }
+
+ CreateUserDefinedFunctionCommand(
+ functionIdentifier.asFunctionIdentifier,
+ inputParamText,
+ returnTypeText,
+ exprText,
+ queryText,
+ comment,
+ deterministic,
+ containsSQL,
+ language,
+ isTableFunc,
+ isTemp = true,
+ ctx.EXISTS != null,
+ ctx.REPLACE != null
+ )
+ }
+ })
+ }
+
+ /**
+ * SQL function routine characteristics.
+ * Currently only deterministic clause and comment clause are used.
+ *
+ * routine language: [LANGUAGE SQL | IDENTIFIER]
+ * specific name: [SPECIFIC specific_name]
+ * routine data access: [NO SQL | CONTAINS SQL | READS SQL DATA | MODIFIES
SQL DATA]
+ * routine null call: [RETURNS NULL ON NULL INPUT | CALLED ON NULL INPUT]
+ * routine determinism: [DETERMINISTIC | NOT DETERMINISTIC]
+ * comment: [COMMENT function_comment]
+ * rights: [SQL SECURITY INVOKER | SQL SECURITY DEFINER]
+ */
+ override def visitRoutineCharacteristics(ctx: RoutineCharacteristicsContext)
+ : (Option[Boolean], Option[Boolean], Option[String],
Option[RoutineLanguage]) =
+ withOrigin(ctx) {
+ checkDuplicateClauses(ctx.routineLanguage(), "LANGUAGE", ctx)
+ checkDuplicateClauses(ctx.specificName(), "SPECIFIC", ctx)
+ checkDuplicateClauses(ctx.sqlDataAccess(), "SQL DATA ACCESS", ctx)
+ checkDuplicateClauses(ctx.nullCall(), "NULL CALL", ctx)
+ checkDuplicateClauses(ctx.deterministic(), "DETERMINISTIC", ctx)
+ checkDuplicateClauses(ctx.commentSpec(), "COMMENT", ctx)
+ checkDuplicateClauses(ctx.rightsClause(), "SQL SECURITY RIGHTS", ctx)
+
+ val language: Option[RoutineLanguage] = ctx
+ .routineLanguage()
+ .asScala
+ .headOption
+ .map(x => {
+ if (x.SQL() != null) {
+ LanguageSQL
+ } else {
+ val name: String = x.IDENTIFIER().getText()
+ operationNotAllowed(s"Unsupported language for user defined
functions: $name", x)
+ }
+ })
+
+ val deterministic =
ctx.deterministic().asScala.headOption.map(visitDeterminism)
+ val comment = visitCommentSpecList(ctx.commentSpec())
+
+ ctx.specificName().asScala.headOption.foreach(checkSpecificName)
+ ctx.nullCall().asScala.headOption.foreach(checkNullCall)
+ ctx.rightsClause().asScala.headOption.foreach(checkRightsClause)
+ val containsSQL: Option[Boolean] =
+ ctx.sqlDataAccess().asScala.headOption.map(visitDataAccess)
+ (containsSQL, deterministic, comment, language)
+ }
+
+ /**
+ * Check if the function has a SPECIFIC name,
+ * which is a way to provide an alternative name for the function.
+ * This check applies for all user defined functions.
+ * Use functionName to specify the function that is currently checked.
+ */
+ private def checkSpecificName(ctx: SpecificNameContext): Unit =
+ withOrigin(ctx) {
+ operationNotAllowed(s"SQL function with SPECIFIC name is not supported",
ctx)
+ }
+
+ private def checkNullCall(ctx: NullCallContext): Unit = withOrigin(ctx) {
+ if (ctx.RETURNS() != null) {
+ operationNotAllowed("SQL function with RETURNS NULL ON NULL INPUT is not
supported", ctx)
+ }
+ }
+
+ /**
+ * Check SQL function data access clause. Currently only READS SQL DATA and
CONTAINS SQL
+ * are supported. Return true if the data access routine is CONTAINS SQL.
+ */
+ private def visitDataAccess(ctx: SqlDataAccessContext): Boolean =
withOrigin(ctx) {
+ if (ctx.NO() != null) {
+ operationNotAllowed("SQL function with NO SQL is not supported", ctx)
+ }
+ if (ctx.MODIFIES() != null) {
+ operationNotAllowed("SQL function with MODIFIES SQL DATA is not
supported", ctx)
+ }
+ return ctx.READS() == null
+ }
+
+ private def checkRightsClause(ctx: RightsClauseContext): Unit =
withOrigin(ctx) {
+ if (ctx.INVOKER() != null) {
+ operationNotAllowed("SQL function with SQL SECURITY INVOKER is not
supported", ctx)
+ }
+ }
+
+ private def visitDeterminism(ctx: DeterministicContext): Boolean =
withOrigin(ctx) {
+ blockBang(ctx.errorCapturingNot())
+ ctx.errorCapturingNot() == null
+ }
+
/**
* Create a DROP FUNCTION statement.
*
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/CreateSQLFunctionCommand.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/CreateSQLFunctionCommand.scala
new file mode 100644
index 000000000000..d9ec807c2dc8
--- /dev/null
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/CreateSQLFunctionCommand.scala
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.{Row, SparkSession}
+import org.apache.spark.sql.catalyst.FunctionIdentifier
+
+/**
+ * The DDL command that creates a SQL function.
+ * For example:
+ * {{{
+ * CREATE [OR REPLACE] [TEMPORARY] FUNCTION [IF NOT EXISTS]
[db_name.]function_name
+ * ([param_name param_type [COMMENT param_comment], ...])
+ * RETURNS {ret_type | TABLE (ret_name ret_type [COMMENT ret_comment],
...])}
+ * [function_properties] function_body;
+ *
+ * function_properties:
+ * [NOT] DETERMINISTIC | COMMENT function_comment | [ CONTAINS SQL |
READS SQL DATA ]
+ *
+ * function_body:
+ * RETURN {expression | TABLE ( query )}
+ * }}}
+ */
+case class CreateSQLFunctionCommand(
+ name: FunctionIdentifier,
+ inputParamText: Option[String],
+ returnTypeText: String,
+ exprText: Option[String],
+ queryText: Option[String],
+ comment: Option[String],
+ isDeterministic: Option[Boolean],
+ containsSQL: Option[Boolean],
+ isTableFunc: Boolean,
+ isTemp: Boolean,
+ ignoreIfExists: Boolean,
+ replace: Boolean)
+ extends CreateUserDefinedFunctionCommand {
+
+ override def run(sparkSession: SparkSession): Seq[Row] = {
+ // TODO: Implement this.
+ Seq.empty
+ }
+}
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/CreateUserDefinedFunctionCommand.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/CreateUserDefinedFunctionCommand.scala
new file mode 100644
index 000000000000..bebb0f5cf6c3
--- /dev/null
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/CreateUserDefinedFunctionCommand.scala
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.catalyst.FunctionIdentifier
+import org.apache.spark.sql.catalyst.catalog.{LanguageSQL, RoutineLanguage,
UserDefinedFunctionErrors}
+import org.apache.spark.sql.catalyst.plans.logical.IgnoreCachedData
+
+/**
+ * The base class for CreateUserDefinedFunctionCommand
+ */
+abstract class CreateUserDefinedFunctionCommand
+ extends LeafRunnableCommand with IgnoreCachedData
+
+
+object CreateUserDefinedFunctionCommand {
+
+ /**
+ * This factory methods serves as a central place to verify required inputs
and
+ * returns the CREATE command for the parsed user defined function.
+ */
+ // scalastyle:off argcount
+ def apply(
+ name: FunctionIdentifier,
+ inputParamText: Option[String],
+ returnTypeText: String,
+ exprText: Option[String],
+ queryText: Option[String],
+ comment: Option[String],
+ isDeterministic: Option[Boolean],
+ containsSQL: Option[Boolean],
+ language: RoutineLanguage,
+ isTableFunc: Boolean,
+ isTemp: Boolean,
+ ignoreIfExists: Boolean,
+ replace: Boolean
+ ): CreateUserDefinedFunctionCommand = {
+ // scalastyle:on argcount
+
+ assert(language != null)
+
+ language match {
+ case LanguageSQL =>
+ CreateSQLFunctionCommand(
+ name,
+ inputParamText,
+ returnTypeText,
+ exprText,
+ queryText,
+ comment,
+ isDeterministic,
+ containsSQL,
+ isTableFunc,
+ isTemp,
+ ignoreIfExists,
+ replace)
+
+ case other =>
+ throw UserDefinedFunctionErrors.unsupportedUserDefinedFunction(other)
+ }
+ }
+}
diff --git
a/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out
index 4e928562aa5d..cabbfa520d77 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out
@@ -32,6 +32,7 @@ BUCKETS false
BY false
BYTE false
CACHE false
+CALLED false
CASCADE false
CASE true
CAST true
@@ -58,6 +59,7 @@ COMPENSATION false
COMPUTE false
CONCATENATE false
CONSTRAINT true
+CONTAINS false
COST false
CREATE true
CROSS true
@@ -84,10 +86,12 @@ DECIMAL false
DECLARE false
DEFAULT false
DEFINED false
+DEFINER false
DELETE false
DELIMITED false
DESC false
DESCRIBE false
+DETERMINISTIC false
DFS false
DIRECTORIES false
DIRECTORY false
@@ -147,6 +151,7 @@ INDEX false
INDEXES false
INNER true
INPATH false
+INPUT false
INPUTFORMAT false
INSERT false
INT false
@@ -154,10 +159,12 @@ INTEGER false
INTERSECT true
INTERVAL false
INTO true
+INVOKER false
IS true
ITEMS false
JOIN true
KEYS false
+LANGUAGE false
LAST false
LATERAL true
LAZY false
@@ -185,6 +192,7 @@ MILLISECONDS false
MINUS false
MINUTE false
MINUTES false
+MODIFIES false
MONTH false
MONTHS false
MSCK false
@@ -229,6 +237,7 @@ PURGE false
QUARTER false
QUERY false
RANGE false
+READS false
REAL false
RECORDREADER false
RECORDWRITER false
@@ -243,6 +252,8 @@ REPLACE false
RESET false
RESPECT false
RESTRICT false
+RETURN false
+RETURNS false
REVOKE false
RIGHT true
ROLE false
@@ -255,6 +266,7 @@ SCHEMA false
SCHEMAS false
SECOND false
SECONDS false
+SECURITY false
SELECT true
SEMI false
SEPARATED false
@@ -272,6 +284,8 @@ SOME true
SORT false
SORTED false
SOURCE false
+SPECIFIC false
+SQL true
START false
STATISTICS false
STORED false
@@ -409,6 +423,7 @@ RIGHT
SELECT
SESSION_USER
SOME
+SQL
TABLE
THEN
TIME
diff --git a/sql/core/src/test/resources/sql-tests/results/keywords.sql.out
b/sql/core/src/test/resources/sql-tests/results/keywords.sql.out
index e036c6620776..e304509aa6d7 100644
--- a/sql/core/src/test/resources/sql-tests/results/keywords.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/keywords.sql.out
@@ -32,6 +32,7 @@ BUCKETS false
BY false
BYTE false
CACHE false
+CALLED false
CASCADE false
CASE false
CAST false
@@ -58,6 +59,7 @@ COMPENSATION false
COMPUTE false
CONCATENATE false
CONSTRAINT false
+CONTAINS false
COST false
CREATE false
CROSS false
@@ -84,10 +86,12 @@ DECIMAL false
DECLARE false
DEFAULT false
DEFINED false
+DEFINER false
DELETE false
DELIMITED false
DESC false
DESCRIBE false
+DETERMINISTIC false
DFS false
DIRECTORIES false
DIRECTORY false
@@ -147,6 +151,7 @@ INDEX false
INDEXES false
INNER false
INPATH false
+INPUT false
INPUTFORMAT false
INSERT false
INT false
@@ -154,10 +159,12 @@ INTEGER false
INTERSECT false
INTERVAL false
INTO false
+INVOKER false
IS false
ITEMS false
JOIN false
KEYS false
+LANGUAGE false
LAST false
LATERAL false
LAZY false
@@ -185,6 +192,7 @@ MILLISECONDS false
MINUS false
MINUTE false
MINUTES false
+MODIFIES false
MONTH false
MONTHS false
MSCK false
@@ -229,6 +237,7 @@ PURGE false
QUARTER false
QUERY false
RANGE false
+READS false
REAL false
RECORDREADER false
RECORDWRITER false
@@ -243,6 +252,8 @@ REPLACE false
RESET false
RESPECT false
RESTRICT false
+RETURN false
+RETURNS false
REVOKE false
RIGHT false
ROLE false
@@ -255,6 +266,7 @@ SCHEMA false
SCHEMAS false
SECOND false
SECONDS false
+SECURITY false
SELECT false
SEMI false
SEPARATED false
@@ -272,6 +284,8 @@ SOME false
SORT false
SORTED false
SOURCE false
+SPECIFIC false
+SQL false
START false
STATISTICS false
STORED false
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
index 6b66f521e930..a4ee33ff1060 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
@@ -18,6 +18,7 @@
package org.apache.spark.sql.execution.command
import org.apache.spark.SparkThrowable
+import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView,
LocalTempView, SchemaCompensation, UnresolvedAttribute, UnresolvedFunctionName,
UnresolvedIdentifier}
import org.apache.spark.sql.catalyst.catalog.{ArchiveResource, FileResource,
FunctionResource, JarResource}
import org.apache.spark.sql.catalyst.dsl.expressions._
@@ -36,6 +37,9 @@ class DDLParserSuite extends AnalysisTest with
SharedSparkSession {
super.parseException(parser.parsePlan)(sqlText)
}
+ private def intercept(sqlCommand: String, messages: String*): Unit =
+ interceptParseException(parser.parsePlan)(sqlCommand, messages: _*)()
+
private def compareTransformQuery(sql: String, expected: LogicalPlan): Unit
= {
val plan =
parser.parsePlan(sql).asInstanceOf[ScriptTransformation].copy(ioschema = null)
comparePlans(plan, expected, checkAnalysis = false)
@@ -841,4 +845,44 @@ class DDLParserSuite extends AnalysisTest with
SharedSparkSession {
parser.parsePlan("SHOW CATALOGS LIKE 'defau*'"),
ShowCatalogsCommand(Some("defau*")))
}
+
+ test("Create SQL functions") {
+ comparePlans(
+ parser.parsePlan("CREATE TEMP FUNCTION foo() RETURNS INT RETURN 1"),
+ CreateSQLFunctionCommand(
+ FunctionIdentifier("foo"),
+ inputParamText = None,
+ returnTypeText = "INT",
+ exprText = Some("1"),
+ queryText = None,
+ comment = None,
+ isDeterministic = None,
+ containsSQL = None,
+ isTableFunc = false,
+ isTemp = true,
+ ignoreIfExists = false,
+ replace = false))
+ intercept("CREATE FUNCTION foo() RETURNS INT RETURN 1",
+ "Operation not allowed: creating persistent SQL functions is not
supported")
+ }
+
+ test("create SQL functions with unsupported routine characteristics") {
+ intercept("CREATE FUNCTION foo() RETURNS INT LANGUAGE blah RETURN 1",
+ "Operation not allowed: Unsupported language for user defined functions:
blah")
+
+ intercept("CREATE FUNCTION foo() RETURNS INT SPECIFIC foo1 RETURN 1",
+ "Operation not allowed: SQL function with SPECIFIC name is not
supported")
+
+ intercept("CREATE FUNCTION foo() RETURNS INT NO SQL RETURN 1",
+ "Operation not allowed: SQL function with NO SQL is not supported")
+
+ intercept("CREATE FUNCTION foo() RETURNS INT NO SQL CONTAINS SQL RETURN 1",
+ "Found duplicate clauses: SQL DATA ACCESS")
+
+ intercept("CREATE FUNCTION foo() RETURNS INT RETURNS NULL ON NULL INPUT
RETURN 1",
+ "Operation not allowed: SQL function with RETURNS NULL ON NULL INPUT is
not supported")
+
+ intercept("CREATE FUNCTION foo() RETURNS INT SQL SECURITY INVOKER RETURN
1",
+ "Operation not allowed: SQL function with SQL SECURITY INVOKER is not
supported")
+ }
}
diff --git
a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
index 43d3532ab78c..e757487915bb 100644
---
a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
+++
b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
@@ -214,7 +214,7 @@ trait ThriftServerWithSparkContextSuite extends
SharedThriftServer {
val sessionHandle = client.openSession(user, "")
val infoValue = client.getInfo(sessionHandle,
GetInfoType.CLI_ODBC_KEYWORDS)
// scalastyle:off line.size.limit
- assert(infoValue.getStringValue ==
"ADD,AFTER,ALL,ALTER,ALWAYS,ANALYZE,AND,ANTI,ANY,ANY_VALUE,ARCHIVE,ARRAY,AS,ASC,AT,AUTHORIZATION,BEGIN,BETWEEN,BIGINT,BINARY,BINDING,BOOLEAN,BOTH,BUCKET,BUCKETS,BY,BYTE,CACHE,CASCADE,CASE,CAST,CATALOG,CATALOGS,CHANGE,CHAR,CHARACTER,CHECK,CLEAR,CLUSTER,CLUSTERED,CODEGEN,COLLATE,COLLATION,COLLECTION,COLUMN,COLUMNS,COMMENT,COMMIT,COMPACT,COMPACTIONS,COMPENSATION,COMPUTE,CONCATENATE,CONSTRAINT,COST,CREATE,CROSS,CUBE,CURRENT,CURRENT_DATE,CURRENT_TIME,C
[...]
+ assert(infoValue.getStringValue ==
"ADD,AFTER,ALL,ALTER,ALWAYS,ANALYZE,AND,ANTI,ANY,ANY_VALUE,ARCHIVE,ARRAY,AS,ASC,AT,AUTHORIZATION,BEGIN,BETWEEN,BIGINT,BINARY,BINDING,BOOLEAN,BOTH,BUCKET,BUCKETS,BY,BYTE,CACHE,CALLED,CASCADE,CASE,CAST,CATALOG,CATALOGS,CHANGE,CHAR,CHARACTER,CHECK,CLEAR,CLUSTER,CLUSTERED,CODEGEN,COLLATE,COLLATION,COLLECTION,COLUMN,COLUMNS,COMMENT,COMMIT,COMPACT,COMPACTIONS,COMPENSATION,COMPUTE,CONCATENATE,CONSTRAINT,CONTAINS,COST,CREATE,CROSS,CUBE,CURRENT,CURRENT_DAT
[...]
// scalastyle:on line.size.limit
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]