gatorsmile closed pull request #13706: [SPARK-15988] [SQL] Implement DDL commands: Create/Drop temporary macro
URL: https://github.com/apache/spark/pull/13706
This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:
diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index dc11e536efc45..a56d74ab0a5f5 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -129,6 +129,9 @@ statement
| CREATE TEMPORARY? FUNCTION qualifiedName AS className=STRING
(USING resource (',' resource)*)? #createFunction
| DROP TEMPORARY? FUNCTION (IF EXISTS)? qualifiedName #dropFunction
+ | CREATE TEMPORARY MACRO macroName=identifier
+ '(' colTypeList? ')' expression #createMacro
+ | DROP TEMPORARY MACRO (IF EXISTS)? macroName=identifier #dropMacro
| EXPLAIN (LOGICAL | FORMATTED | EXTENDED | CODEGEN | COST)?
statement #explain
| SHOW TABLES ((FROM | IN) db=identifier)?
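For orientation, a minimal sketch of the statements the two new rules admit, written against the public SQL API (assuming a SparkSession named `spark` with this patch applied; the macro name and body are taken from the test suite below):

    // #createMacro: an optional typed parameter list followed by a single expression body.
    spark.sql("CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))")
    spark.sql("SELECT SIGMOID(2)").show()
    // #dropMacro: IF EXISTS suppresses the not-found error.
    spark.sql("DROP TEMPORARY MACRO IF EXISTS SIGMOID")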
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/ExpressionInfo.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/ExpressionInfo.java
index 4565ed44877a5..681ab8669f842 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/ExpressionInfo.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/ExpressionInfo.java
@@ -21,11 +21,17 @@
* Expression information, will be used to describe a expression.
*/
public class ExpressionInfo {
+
+ public enum FunctionType {
+ BUILTIN, PERSISTENT, TEMPORARY;
+ }
+
private String className;
private String usage;
private String name;
private String extended;
private String db;
+ private FunctionType functionType;
public String getClassName() {
return className;
@@ -47,19 +53,32 @@ public String getDb() {
return db;
}
- public ExpressionInfo(String className, String db, String name, String usage, String extended) {
+ public FunctionType getFunctionType() {
+ return functionType;
+ }
+
+ public ExpressionInfo(String className, String db, String name, String usage, String extended, FunctionType functionType) {
this.className = className;
this.db = db;
this.name = name;
this.usage = usage;
this.extended = extended;
+ this.functionType = functionType;
}
public ExpressionInfo(String className, String name) {
- this(className, null, name, null, null);
+ this(className, null, name, null, null, FunctionType.TEMPORARY);
+ }
+
+ public ExpressionInfo(String className, String name, FunctionType functionType) {
+ this(className, null, name, null, null, functionType);
}
public ExpressionInfo(String className, String db, String name) {
- this(className, db, name, null, null);
+ this(className, db, name, null, null, FunctionType.TEMPORARY);
+ }
+
+ public ExpressionInfo(String className, String db, String name, FunctionType functionType) {
+ this(className, db, name, null, null, functionType);
}
}
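A quick sketch of how the widened constructors read from calling code; the class and function names here are placeholders, not part of the patch:

    import org.apache.spark.sql.catalyst.expressions.ExpressionInfo
    import org.apache.spark.sql.catalyst.expressions.ExpressionInfo.FunctionType

    // Explicitly tagged as a builtin:
    val builtinInfo =
      new ExpressionInfo("org.example.MyExpr", "my_func", FunctionType.BUILTIN)
    // The pre-existing two-argument constructor now defaults to TEMPORARY:
    val tempInfo = new ExpressionInfo("org.example.MyExpr", "my_func")
    assert(tempInfo.getFunctionType == FunctionType.TEMPORARY)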
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
index 50ee6cd4085ea..4b75924d87b68 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
@@ -55,3 +55,12 @@ class AnalysisException protected[sql] (
s"$message;$lineAnnotation$positionAnnotation"
}
}
+
+object AnalysisException {
+ /**
+ * Create a no such temporary macro exception.
+ */
+ def noSuchTempMacroException(func: String): AnalysisException = {
+ new AnalysisException(s"Temporary macro '$func' not found")
+ }
+}
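The helper surfaces when DROP TEMPORARY MACRO names a macro that was never registered; a small sketch of the caller-visible behavior (macro name illustrative, message text taken from the test output below):

    import org.apache.spark.sql.AnalysisException

    try {
      spark.sql("DROP TEMPORARY MACRO some_macro")
    } catch {
      case e: AnalysisException =>
        // Produced by AnalysisException.noSuchTempMacroException:
        assert(e.getMessage.contains("Temporary macro 'some_macro' not found"))
    }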
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index a4c7f7a8de223..0d87bb320e616 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -26,6 +26,7 @@ import scala.util.{Failure, Success, Try}
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder
import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.expressions.ExpressionInfo.FunctionType
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.expressions.xml._
import org.apache.spark.sql.catalyst.util.StringKeyHashMap
@@ -120,6 +121,58 @@ class SimpleFunctionRegistry extends FunctionRegistry {
}
}
+class SystemFunctionRegistry(builtin: SimpleFunctionRegistry) extends SimpleFunctionRegistry {
+
+ override def registerFunction(
+ name: String,
+ info: ExpressionInfo,
+ builder: FunctionBuilder): Unit = synchronized {
+ if (info.getFunctionType.equals(FunctionType.BUILTIN)) {
+ builtin.registerFunction(name, info, builder)
+ } else {
+ functionBuilders.put(name, (info, builder))
+ }
+ }
+
+ override def lookupFunction(name: String, children: Seq[Expression]): Expression = {
+ val func = synchronized {
+ functionBuilders.get(name).map(_._2).orElse(builtin.lookupFunctionBuilder(name)).getOrElse {
+ throw new AnalysisException(s"undefined function $name")
+ }
+ }
+ func(children)
+ }
+
+ override def listFunction(): Seq[String] = synchronized {
+ (functionBuilders.iterator.map(_._1).toList ++ builtin.listFunction()).distinct.sorted
+ }
+
+ override def lookupFunction(name: String): Option[ExpressionInfo] = synchronized {
+ functionBuilders.get(name).map(_._1).orElse(builtin.lookupFunction(name))
+ }
+
+ override def lookupFunctionBuilder(name: String): Option[FunctionBuilder] = synchronized {
+ functionBuilders.get(name).map(_._2).orElse(builtin.lookupFunctionBuilder(name))
+ }
+
+ override def dropFunction(name: String): Boolean = synchronized {
+ functionBuilders.remove(name).isDefined
+ }
+
+ override def clear(): Unit = synchronized {
+ builtin.clear()
+ functionBuilders.clear()
+ }
+
+ override def clone(): SimpleFunctionRegistry = synchronized {
+ val registry = new SystemFunctionRegistry(builtin.clone())
+ functionBuilders.iterator.foreach { case (name, (info, builder)) =>
+ registry.registerFunction(name, info, builder)
+ }
+ registry
+ }
+}
+
/**
* A trivial catalog that returns an error when a function is requested. Used for testing when all
* functions are already filled in and the analyzer needs only to resolve attribute references.
@@ -456,6 +509,8 @@ object FunctionRegistry {
fr
}
+ val systemRegistry = new SystemFunctionRegistry(builtin)
+
val functionSet: Set[String] = builtin.listFunction().toSet
/** See usage above. */
@@ -519,7 +574,8 @@ object FunctionRegistry {
}
val clazz = scala.reflect.classTag[Cast].runtimeClass
val usage = "_FUNC_(expr) - Casts the value `expr` to the target data type
`_FUNC_`."
- (name, (new ExpressionInfo(clazz.getCanonicalName, null, name, usage,
null), builder))
+ (name, (new ExpressionInfo(clazz.getCanonicalName, null, name, usage, null,
+ FunctionType.BUILTIN), builder))
}
/**
@@ -529,9 +585,10 @@ object FunctionRegistry {
val clazz = scala.reflect.classTag[T].runtimeClass
val df = clazz.getAnnotation(classOf[ExpressionDescription])
if (df != null) {
- new ExpressionInfo(clazz.getCanonicalName, null, name, df.usage(), df.extended())
+ new ExpressionInfo(clazz.getCanonicalName, null, name, df.usage(), df.extended(),
+ ExpressionInfo.FunctionType.BUILTIN)
} else {
- new ExpressionInfo(clazz.getCanonicalName, name)
+ new ExpressionInfo(clazz.getCanonicalName, name, ExpressionInfo.FunctionType.BUILTIN)
}
}
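The intended layering of the new registry, sketched under the assumption that `tempInfo` and `tempBuilder` come from a macro registration as in SessionCatalog.createTempMacro below: BUILTIN registrations are routed into the wrapped builtin registry, everything else stays session-local, and lookups fall through from the session layer to the builtin layer.

    import org.apache.spark.sql.catalyst.analysis.FunctionRegistry

    val registry = FunctionRegistry.systemRegistry.clone()
    // A TEMPORARY entry lands in the session-local map:
    // registry.registerFunction("my_macro", tempInfo, tempBuilder)
    // Builtins still resolve through the fallback:
    assert(registry.lookupFunction("abs").isDefined)
    // dropFunction only touches session-local entries, so builtins survive:
    assert(!registry.dropFunction("abs"))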
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index f6653d384fe1d..ba27645572522 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -34,6 +34,7 @@ import org.apache.spark.sql.catalyst._
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder
import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionInfo}
+import org.apache.spark.sql.catalyst.expressions.ExpressionInfo.FunctionType
import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, ParserInterface}
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias, View}
import org.apache.spark.sql.catalyst.util.StringUtils
@@ -1090,6 +1091,21 @@ class SessionCatalog(
}
}
+ /** Create a temporary macro. */
+ def createTempMacro(
+ name: String,
+ info: ExpressionInfo,
+ functionBuilder: FunctionBuilder): Unit = {
+ functionRegistry.registerFunction(name, info, functionBuilder)
+ }
+
+ /** Drop a temporary macro. */
+ def dropTempMacro(name: String, ignoreIfNotExists: Boolean): Unit = {
+ if (!functionRegistry.dropFunction(name) && !ignoreIfNotExists) {
+ throw AnalysisException.noSuchTempMacroException(name)
+ }
+ }
+
/**
* Returns whether it is a temporary function. If not existed, returns false.
*/
@@ -1126,7 +1142,8 @@ class SessionCatalog(
new ExpressionInfo(
metadata.className,
qualifiedName.database.orNull,
- qualifiedName.identifier)
+ qualifiedName.identifier,
+ FunctionType.PERSISTENT)
} else {
failFunctionLookup(name.funcName)
}
@@ -1248,7 +1265,11 @@ class SessionCatalog(
if (func.database.isDefined) {
dropFunction(func, ignoreIfNotExists = false)
} else {
- dropTempFunction(func.funcName, ignoreIfNotExists = false)
+ val functionType = functionRegistry.lookupFunction(func.funcName)
+ .map(_.getFunctionType).getOrElse(FunctionType.TEMPORARY)
+ if (!functionType.equals(FunctionType.BUILTIN)) {
+ dropTempFunction(func.funcName, ignoreIfNotExists = false)
+ }
}
}
clearTempTables()
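A minimal sketch of the two new catalog hooks in isolation, assuming code running inside the org.apache.spark.sql package (sessionState is package-private) and a constant-valued builder that is purely illustrative:

    import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionInfo, Literal}

    val catalog = spark.sessionState.catalog
    val info = new ExpressionInfo("macro_one", "macro_one")
    val builder = (_: Seq[Expression]) => Literal(1)
    catalog.createTempMacro("macro_one", info, builder)
    catalog.dropTempMacro("macro_one", ignoreIfNotExists = false)
    // Dropping again would throw "Temporary macro 'macro_one' not found"
    // unless the caller opts out:
    catalog.dropTempMacro("macro_one", ignoreIfNotExists = true)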
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
index b6675a84ece48..d2a35c5131cfd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
@@ -260,7 +260,8 @@ object CreateStruct extends FunctionBuilder {
null,
"struct",
"_FUNC_(col1, col2, col3, ...) - Creates a struct with the given field
values.",
- "")
+ "",
+ ExpressionInfo.FunctionType.BUILTIN)
("struct", (info, this))
}
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 3c58c6e1b6780..05e722bd88a7b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -34,7 +34,7 @@ import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.execution.command._
import org.apache.spark.sql.execution.datasources.{CreateTable, _}
import org.apache.spark.sql.internal.{HiveSerDe, SQLConf, VariableSubstitution}
-import org.apache.spark.sql.types.StructType
+import org.apache.spark.sql.types.{StructField, StructType}
/**
* Concrete parser for Spark SQL statements.
@@ -715,6 +715,36 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
ctx.TEMPORARY != null)
}
+ /**
+ * Create a [[CreateMacroCommand]] command.
+ *
+ * For example:
+ * {{{
+ * CREATE TEMPORARY MACRO macro_name([col_name col_type, ...]) expression;
+ * }}}
+ */
+ override def visitCreateMacro(ctx: CreateMacroContext): LogicalPlan = withOrigin(ctx) {
+ val columns = createSchema(ctx.colTypeList)
+ val e = expression(ctx.expression)
+ CreateMacroCommand(
+ ctx.macroName.getText,
+ MacroFunctionWrapper(columns, e))
+ }
+
+ /**
+ * Create a [[DropMacroCommand]] command.
+ *
+ * For example:
+ * {{{
+ * DROP TEMPORARY MACRO [IF EXISTS] macro_name;
+ * }}}
+ */
+ override def visitDropMacro(ctx: DropMacroContext): LogicalPlan = withOrigin(ctx) {
+ DropMacroCommand(
+ ctx.macroName.getText,
+ ctx.EXISTS != null)
+ }
+
/**
* Create a [[DropTableCommand]] command.
*/
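What the two visitors hand back, sketched against the session's parser (same package-visibility caveat as above; assuming this patch is applied):

    val create = spark.sessionState.sqlParser.parsePlan(
      "CREATE TEMPORARY MACRO SIMPLE_ADD (x INT, y INT) x + y")
    // create is CreateMacroCommand("SIMPLE_ADD", MacroFunctionWrapper(columns, x + y))

    val drop = spark.sessionState.sqlParser.parsePlan(
      "DROP TEMPORARY MACRO IF EXISTS SIMPLE_ADD")
    // drop is DropMacroCommand("SIMPLE_ADD", ifExists = true)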
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/macros.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/macros.scala
new file mode 100644
index 0000000000000..d3fbd94e39275
--- /dev/null
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/macros.scala
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import scala.collection.mutable
+
+import org.apache.spark.sql.{AnalysisException, Row, SparkSession}
+import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, Project}
+import org.apache.spark.sql.types.StructType
+
+/**
+ * This class provides arguments and body expression of the macro function.
+ */
+case class MacroFunctionWrapper(columns: StructType, macroFunction: Expression)
+
+
+/**
+ * The DDL command that creates a macro.
+ * To create a temporary macro, the syntax of using this command in SQL is:
+ * {{{
+ * CREATE TEMPORARY MACRO macro_name([col_name col_type, ...]) expression;
+ * }}}
+ */
+case class CreateMacroCommand(
+ macroName: String,
+ funcWrapper: MacroFunctionWrapper)
+ extends RunnableCommand {
+
+ override def run(sparkSession: SparkSession): Seq[Row] = {
+ val catalog = sparkSession.sessionState.catalog
+ val columns = funcWrapper.columns.map(_.name)
+ val columnAttrs = funcWrapper.columns.toAttributes
+ def formatName = if (sparkSession.sessionState.conf.caseSensitiveAnalysis) {
+ (name: String) => name
+ } else {
+ (name: String) => name.toLowerCase
+ }
+ val colToIndex: Map[String, Int] = columns.map(formatName).zipWithIndex.toMap
+ if (colToIndex.size != columns.size) {
+ throw new AnalysisException(s"Failed to CREATE TEMPORARY MACRO $macroName, because " +
+ s"at least one parameter name was used more than once : ${columns.mkString(",")}")
+ }
+
+ val resolvedMacroFunction = try {
+ val plan = Project(Seq(Alias(funcWrapper.macroFunction, "m")()), LocalRelation(columnAttrs))
+ val analyzed @ Project(Seq(named), _) = sparkSession.sessionState.analyzer.execute(plan)
+ sparkSession.sessionState.analyzer.checkAnalysis(analyzed)
+ named.children.head
+ } catch {
+ case a: AnalysisException =>
+ throw new AnalysisException(s"Failed to CREATE TEMPORARY MACRO $macroName, because of " +
+ s"exception: ${a.getMessage}")
+ }
+
+ val foundColumns: mutable.Set[String] = new mutable.HashSet()
+ val macroFunction = resolvedMacroFunction.transform {
+ case u: AttributeReference =>
+ val index = colToIndex.get(formatName(u.name)).getOrElse(
+ throw new AnalysisException(s"Failed to CREATE TEMPORARY MACRO $macroName, because " +
+ s"it cannot find colName: ${u.name}, actual columns: ${columns.mkString(",")}"))
+ foundColumns.add(formatName(u.name))
+ BoundReference(index, u.dataType, u.nullable)
+ }
+ if (foundColumns.size != columns.size) {
+ throw new AnalysisException(s"Failed to CREATE TEMPORARY MACRO $macroName, because " +
+ s"expected columns ${foundColumns.mkString(",")} but found ${columns.mkString(",")}")
+ }
+
+ val columnLength: Int = columns.length
+ val info = new ExpressionInfo(macroName, macroName)
+ val builder = (children: Seq[Expression]) => {
+ if (children.size != columnLength) {
+ throw new AnalysisException(s"Arguments length: ${children.size} != " +
+ s"expected number: ${columnLength} of arguments for Macro $macroName")
+ }
+ macroFunction.transform {
+ // Input types need no re-validation here; they were checked when the macro was created.
+ case b: BoundReference => children(b.ordinal)
+ }
+ }
+ catalog.createTempMacro(macroName, info, builder)
+ Seq.empty[Row]
+ }
+}
+
+/**
+ * The DDL command that drops a macro.
+ * If the macro does not exist, raises an error unless `ifExists` is true.
+ * {{{
+ * DROP TEMPORARY MACRO [IF EXISTS] macro_name;
+ * }}}
+ */
+case class DropMacroCommand(macroName: String, ifExists: Boolean)
+ extends RunnableCommand {
+
+ override def run(sparkSession: SparkSession): Seq[Row] = {
+ val catalog = sparkSession.sessionState.catalog
+ catalog.dropTempMacro(macroName, ifExists)
+ Seq.empty[Row]
+ }
+}
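The heart of CreateMacroCommand is the builder's transform: the stored body has its parameters bound by position, and each call site splices the actual argument expressions back in, so a macro is inlined rather than invoked. A self-contained sketch of that substitution, with the expressions built by hand instead of parsed:

    import org.apache.spark.sql.catalyst.expressions._
    import org.apache.spark.sql.types.IntegerType

    // Body of `macro_add(x int, y int) x + y` after parameter binding:
    val body: Expression = Add(
      BoundReference(0, IntegerType, nullable = true),
      BoundReference(1, IntegerType, nullable = true))

    // Expansion for the call site `macro_add(1, 2)`:
    val args = Seq(Literal(1), Literal(2))
    val expanded = body.transform {
      case b: BoundReference => args(b.ordinal)
    }
    // expanded is Add(Literal(1), Literal(2)); SELECT macro_add(1, 2) therefore
    // analyzes exactly like SELECT 1 + 2.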
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala
index 2a801d87b12eb..4f43badbc6cd0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala
@@ -95,7 +95,7 @@ abstract class BaseSessionStateBuilder(
* This either gets cloned from a pre-existing version or cloned from the built-in registry.
*/
protected lazy val functionRegistry: FunctionRegistry = {
- parentState.map(_.functionRegistry).getOrElse(FunctionRegistry.builtin).clone()
+ parentState.map(_.functionRegistry).getOrElse(FunctionRegistry.systemRegistry).clone()
}
/**
diff --git a/sql/core/src/test/resources/sql-tests/inputs/macro.sql b/sql/core/src/test/resources/sql-tests/inputs/macro.sql
new file mode 100644
index 0000000000000..fbbe5b368c7e9
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/inputs/macro.sql
@@ -0,0 +1,65 @@
+CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x));
+SELECT SIGMOID(2);
+DROP TEMPORARY MACRO SIGMOID;
+
+CREATE TEMPORARY MACRO FIXED_NUMBER() 1;
+SELECT FIXED_NUMBER() + 1;
+DROP TEMPORARY MACRO FIXED_NUMBER;
+
+CREATE TEMPORARY MACRO SIMPLE_ADD (x INT, y INT) x + y;
+SELECT SIMPLE_ADD(1, 9);
+DROP TEMPORARY MACRO SIMPLE_ADD;
+
+CREATE TEMPORARY MACRO flr(d bigint) FLOOR(d/10)*10;
+SELECT flr(12);
+DROP TEMPORARY MACRO flr;
+
+CREATE TEMPORARY MACRO STRING_LEN(x string) length(x);
+CREATE TEMPORARY MACRO STRING_LEN_PLUS_ONE(x string) length(x)+1;
+CREATE TEMPORARY MACRO STRING_LEN_PLUS_TWO(x string) length(x)+2;
+create table macro_test (x string) using parquet;
+insert into table macro_test values ("bb"), ("a"), ("ccc");
+SELECT CONCAT(STRING_LEN(x), ":", STRING_LEN_PLUS_ONE(x), ":", STRING_LEN_PLUS_TWO(x)) a
+FROM macro_test;
+SELECT CONCAT(STRING_LEN(x), ":", STRING_LEN_PLUS_ONE(x), ":", STRING_LEN_PLUS_TWO(x)) a
+FROM
+macro_test
+sort by a;
+drop table macro_test;
+
+CREATE TABLE macro_testing(a int, b int, c int) using parquet;
+insert into table macro_testing values (1,2,3);
+insert into table macro_testing values (4,5,6);
+CREATE TEMPORARY MACRO math_square(x int) x*x;
+CREATE TEMPORARY MACRO math_add(x int) x+x;
+select math_square(a), math_square(b),factorial(a), factorial(b), math_add(a), math_add(b),int(c)
+from macro_testing order by int(c);
+drop table macro_testing;
+
+CREATE TEMPORARY MACRO max(x int, y int) x + y;
+SELECT max(1, 2);
+DROP TEMPORARY MACRO max;
+SELECT max(2);
+
+CREATE TEMPORARY MACRO c() 3E9;
+SELECT floor(c()/10);
+DROP TEMPORARY MACRO c;
+
+CREATE TEMPORARY MACRO fixed_number() 42;
+DROP TEMPORARY FUNCTION fixed_number;
+DROP TEMPORARY MACRO IF EXISTS fixed_number;
+
+CREATE TEMPORARY MACRO add_bigint_int(x bigint, y int) x + y;
+SELECT add_bigint_int(1, 1.5);
+DROP TEMPORARY MACRO add_bigint_int;
+
+-- invalid queries
+CREATE TEMPORARY MACRO simple_add_error(x int) x + y;
+CREATE TEMPORARY MACRO simple_add_error(x int, x int) x + y;
+CREATE TEMPORARY MACRO simple_add_error(x int) x NOT IN (select c2);
+DROP TEMPORARY MACRO SOME_MACRO;
+CREATE TEMPORARY MACRO macro_add(x int, y int, z int) x + y;
+CREATE TEMPORARY MACRO macro_add(x int, x int) x + x;
+CREATE TEMPORARY MACRO macro_add(x int, y int) x + y;
+SELECT macro_add(1, 2, 3);
+DROP TEMPORARY MACRO macro_add;
diff --git a/sql/core/src/test/resources/sql-tests/results/macro.sql.out b/sql/core/src/test/resources/sql-tests/results/macro.sql.out
new file mode 100644
index 0000000000000..aec4b358b7dce
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/results/macro.sql.out
@@ -0,0 +1,420 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 49
+
+
+-- !query 0
+CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
+-- !query 0 schema
+struct<>
+-- !query 0 output
+
+
+
+-- !query 1
+SELECT SIGMOID(2)
+-- !query 1 schema
+struct<(CAST(1.0 AS DOUBLE) / (CAST(1.0 AS DOUBLE) + EXP(CAST((- 2) AS DOUBLE)))):double>
+-- !query 1 output
+0.8807970779778823
+
+
+-- !query 2
+DROP TEMPORARY MACRO SIGMOID
+-- !query 2 schema
+struct<>
+-- !query 2 output
+
+
+
+-- !query 3
+CREATE TEMPORARY MACRO FIXED_NUMBER() 1
+-- !query 3 schema
+struct<>
+-- !query 3 output
+
+
+
+-- !query 4
+SELECT FIXED_NUMBER() + 1
+-- !query 4 schema
+struct<(1 + 1):int>
+-- !query 4 output
+2
+
+
+-- !query 5
+DROP TEMPORARY MACRO FIXED_NUMBER
+-- !query 5 schema
+struct<>
+-- !query 5 output
+
+
+
+-- !query 6
+CREATE TEMPORARY MACRO SIMPLE_ADD (x INT, y INT) x + y
+-- !query 6 schema
+struct<>
+-- !query 6 output
+
+
+
+-- !query 7
+SELECT SIMPLE_ADD(1, 9)
+-- !query 7 schema
+struct<(1 + 9):int>
+-- !query 7 output
+10
+
+
+-- !query 8
+DROP TEMPORARY MACRO SIMPLE_ADD
+-- !query 8 schema
+struct<>
+-- !query 8 output
+
+
+
+-- !query 9
+CREATE TEMPORARY MACRO flr(d bigint) FLOOR(d/10)*10
+-- !query 9 schema
+struct<>
+-- !query 9 output
+
+
+
+-- !query 10
+SELECT flr(12)
+-- !query 10 schema
+struct<(FLOOR((CAST(12 AS DOUBLE) / CAST(10 AS DOUBLE))) * CAST(10 AS BIGINT)):bigint>
+-- !query 10 output
+10
+
+
+-- !query 11
+DROP TEMPORARY MACRO flr
+-- !query 11 schema
+struct<>
+-- !query 11 output
+
+
+
+-- !query 12
+CREATE TEMPORARY MACRO STRING_LEN(x string) length(x)
+-- !query 12 schema
+struct<>
+-- !query 12 output
+
+
+
+-- !query 13
+CREATE TEMPORARY MACRO STRING_LEN_PLUS_ONE(x string) length(x)+1
+-- !query 13 schema
+struct<>
+-- !query 13 output
+
+
+
+-- !query 14
+CREATE TEMPORARY MACRO STRING_LEN_PLUS_TWO(x string) length(x)+2
+-- !query 14 schema
+struct<>
+-- !query 14 output
+
+
+
+-- !query 15
+create table macro_test (x string) using parquet
+-- !query 15 schema
+struct<>
+-- !query 15 output
+
+
+
+-- !query 16
+insert into table macro_test values ("bb"), ("a"), ("ccc")
+-- !query 16 schema
+struct<>
+-- !query 16 output
+
+
+
+-- !query 17
+SELECT CONCAT(STRING_LEN(x), ":", STRING_LEN_PLUS_ONE(x), ":", STRING_LEN_PLUS_TWO(x)) a
+FROM macro_test
+-- !query 17 schema
+struct<a:string>
+-- !query 17 output
+1:2:3
+2:3:4
+3:4:5
+
+
+-- !query 18
+SELECT CONCAT(STRING_LEN(x), ":", STRING_LEN_PLUS_ONE(x), ":", STRING_LEN_PLUS_TWO(x)) a
+FROM
+macro_test
+sort by a
+-- !query 18 schema
+struct<a:string>
+-- !query 18 output
+1:2:3
+2:3:4
+3:4:5
+
+
+-- !query 19
+drop table macro_test
+-- !query 19 schema
+struct<>
+-- !query 19 output
+
+
+
+-- !query 20
+CREATE TABLE macro_testing(a int, b int, c int) using parquet
+-- !query 20 schema
+struct<>
+-- !query 20 output
+
+
+
+-- !query 21
+insert into table macro_testing values (1,2,3)
+-- !query 21 schema
+struct<>
+-- !query 21 output
+
+
+
+-- !query 22
+insert into table macro_testing values (4,5,6)
+-- !query 22 schema
+struct<>
+-- !query 22 output
+
+
+
+-- !query 23
+CREATE TEMPORARY MACRO math_square(x int) x*x
+-- !query 23 schema
+struct<>
+-- !query 23 output
+
+
+
+-- !query 24
+CREATE TEMPORARY MACRO math_add(x int) x+x
+-- !query 24 schema
+struct<>
+-- !query 24 output
+
+
+
+-- !query 25
+select math_square(a), math_square(b),factorial(a), factorial(b), math_add(a), math_add(b),int(c)
+from macro_testing order by int(c)
+-- !query 25 schema
+struct<(a * a):int,(b * b):int,factorial(a):bigint,factorial(b):bigint,(a + a):int,(b + b):int,c:int>
+-- !query 25 output
+1 4 1 2 2 4 3
+16 25 24 120 8 10 6
+
+
+-- !query 26
+drop table macro_testing
+-- !query 26 schema
+struct<>
+-- !query 26 output
+
+
+
+-- !query 27
+CREATE TEMPORARY MACRO max(x int, y int) x + y
+-- !query 27 schema
+struct<>
+-- !query 27 output
+
+
+
+-- !query 28
+SELECT max(1, 2)
+-- !query 28 schema
+struct<(1 + 2):int>
+-- !query 28 output
+3
+
+
+-- !query 29
+DROP TEMPORARY MACRO max
+-- !query 29 schema
+struct<>
+-- !query 29 output
+
+
+
+-- !query 30
+SELECT max(2)
+-- !query 30 schema
+struct<max(2):int>
+-- !query 30 output
+2
+
+
+-- !query 31
+CREATE TEMPORARY MACRO c() 3E9
+-- !query 31 schema
+struct<>
+-- !query 31 output
+
+
+
+-- !query 32
+SELECT floor(c()/10)
+-- !query 32 schema
+struct<FLOOR((CAST(3E+9 AS DECIMAL(10,0)) / CAST(CAST(10 AS DECIMAL(10,0)) AS DECIMAL(10,0)))):decimal(11,0)>
+-- !query 32 output
+300000000
+
+
+-- !query 33
+DROP TEMPORARY MACRO c
+-- !query 33 schema
+struct<>
+-- !query 33 output
+
+
+
+-- !query 34
+CREATE TEMPORARY MACRO fixed_number() 42
+-- !query 34 schema
+struct<>
+-- !query 34 output
+
+
+
+-- !query 35
+DROP TEMPORARY FUNCTION fixed_number
+-- !query 35 schema
+struct<>
+-- !query 35 output
+
+
+
+-- !query 36
+DROP TEMPORARY MACRO IF EXISTS fixed_number
+-- !query 36 schema
+struct<>
+-- !query 36 output
+
+
+
+-- !query 37
+CREATE TEMPORARY MACRO add_bigint_int(x bigint, y int) x + y
+-- !query 37 schema
+struct<>
+-- !query 37 output
+
+
+
+-- !query 38
+SELECT add_bigint_int(1, 1.5)
+-- !query 38 schema
+struct<(CAST(1 AS BIGINT) + CAST(1.5 AS BIGINT)):bigint>
+-- !query 38 output
+2
+
+
+-- !query 39
+DROP TEMPORARY MACRO add_bigint_int
+-- !query 39 schema
+struct<>
+-- !query 39 output
+
+
+
+-- !query 40
+multiply
+
+CREATE TEMPORARY MACRO simple_add_error(x int) x + y
+-- !query 40 schema
+struct<>
+-- !query 40 output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+extraneous input 'multiply' expecting {'(', 'SELECT', 'FROM', 'ADD', 'DESC', 'WITH', 'VALUES', 'CREATE', 'TABLE', 'INSERT', 'DELETE', 'DESCRIBE', 'EXPLAIN', 'SHOW', 'USE', 'DROP', 'ALTER', 'MAP', 'SET', 'RESET', 'START', 'COMMIT', 'ROLLBACK', 'REDUCE', 'REFRESH', 'CLEAR', 'CACHE', 'UNCACHE', 'DFS', 'TRUNCATE', 'ANALYZE', 'LIST', 'REVOKE', 'GRANT', 'LOCK', 'UNLOCK', 'MSCK', 'EXPORT', 'IMPORT', 'LOAD'}(line 1, pos 0)
+
+== SQL ==
+multiply
+^^^
+
+CREATE TEMPORARY MACRO simple_add_error(x int) x + y
+
+
+-- !query 41
+CREATE TEMPORARY MACRO simple_add_error(x int, x int) x + y
+-- !query 41 schema
+struct<>
+-- !query 41 output
+org.apache.spark.sql.AnalysisException
+Failed to CREATE TEMPORARY MACRO simple_add_error, because at least one parameter name was used more than once : x,x;
+
+
+-- !query 42
+CREATE TEMPORARY MACRO simple_add_error(x int) x NOT IN (select c2)
+-- !query 42 schema
+struct<>
+-- !query 42 output
+org.apache.spark.sql.AnalysisException
+Failed to CREATE TEMPORARY MACRO simple_add_error, because of exception: cannot resolve '`c2`' given input columns: []; line 1 pos 64;
+
+
+-- !query 43
+DROP TEMPORARY MACRO SOME_MACRO
+-- !query 43 schema
+struct<>
+-- !query 43 output
+org.apache.spark.sql.AnalysisException
+Temporary macro 'SOME_MACRO' not found;
+
+
+-- !query 44
+CREATE TEMPORARY MACRO macro_add(x int, y int, z int) x + y
+-- !query 44 schema
+struct<>
+-- !query 44 output
+org.apache.spark.sql.AnalysisException
+Failed to CREATE TEMPORARY MACRO macro_add, because expected columns Set(0, 1) but found x,y,z;
+
+
+-- !query 45
+CREATE TEMPORARY MACRO macro_add(x int, x int) x + x
+-- !query 45 schema
+struct<>
+-- !query 45 output
+org.apache.spark.sql.AnalysisException
+Failed to CREATE TEMPORARY MACRO macro_add, because at least one parameter name was used more than once : x,x;
+
+
+-- !query 46
+CREATE TEMPORARY MACRO macro_add(x int, y int) x + y
+-- !query 46 schema
+struct<>
+-- !query 46 output
+
+
+
+-- !query 47
+SELECT macro_add(1, 2, 3)
+-- !query 47 schema
+struct<>
+-- !query 47 output
+org.apache.spark.sql.AnalysisException
+Arguments length: 3 != expected number: 2 of arguments for Macro macro_add; line 1 pos 7
+
+
+-- !query 48
+DROP TEMPORARY MACRO macro_add
+-- !query 48 schema
+struct<>
+-- !query 48 output
+
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index cf33760360724..2de83cff5ef17 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -1153,13 +1153,6 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAnd
sql("ALTER INDEX my_index ON my_table set IDXPROPERTIES (\"prop1\"=\"val1_new\")")}
}
- test("create/drop macro commands are not supported") {
- assertUnsupportedFeature {
- sql("CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))")
- }
- assertUnsupportedFeature { sql("DROP TEMPORARY MACRO SIGMOID") }
- }
-
test("dynamic partitioning is allowed when hive.exec.dynamic.partition.mode
is nonstrict") {
val modeConfKey = "hive.exec.dynamic.partition.mode"
withTable("with_parts") {
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]