This is an automated email from the ASF dual-hosted git repository.
fchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 11dcd30e8 [KYUUBI #1265] `OPTIMIZE` where clause expression support
11dcd30e8 is described below
commit 11dcd30e884c751e38534046e75106818abcd789
Author: Fu Chen <[email protected]>
AuthorDate: Wed Jul 5 10:21:49 2023 +0800
[KYUUBI #1265] `OPTIMIZE` where clause expression support
### _Why are the changes needed?_
To close #1265.
After this PR, the following case works:
```sql
CREATE TABLE p (c1 INT, c2 INT, c3 INT) PARTITIONED BY (event_date DATE);
OPTIMIZE p where event_date = current_date() ZORDER BY c1, c2;
```
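Note that the predicate must be a likely-selective filter over partition columns; other expressions are rejected. A sketch of the boundary (the rejected cases mirror the new tests in `ZorderSuiteBase`; the accepted one is an illustrative assumption for the table above):
```sql
-- accepted: a binary comparison on the partition column
OPTIMIZE p WHERE event_date > DATE '2023-01-01' ZORDER BY c1, c2;
-- rejected while parsing: the top-level expression is an alias, not a
-- likely-selective predicate ("unsupported partition predicates")
OPTIMIZE p WHERE event_date = current_date AS c ZORDER BY c1, c2;
-- rejected during analysis: c1 is a data column
-- ("Only partition column filters are allowed")
OPTIMIZE p WHERE c1 = 1 ZORDER BY c1, c2;
```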
### _How was this patch tested?_
- [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible
- [ ] Add screenshots for manual tests if appropriate
- [x] [Run test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests) locally before making a pull request
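Implementation note: the grammar no longer hand-rolls an expression parser; the `predicateToken : .+?` rule just captures the raw `WHERE` text, and `buildOptimizeStatement` later hands it to Spark's delegate parser via `delegate.parseExpression`. A minimal standalone sketch of that "capture raw text, parse later" idea (illustrative only, using `CatalystSqlParser` as a stand-in for the session's delegate parser):
```scala
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

object DeferredPredicateParsing extends App {
  // What `predicateToken` would capture from:
  //   OPTIMIZE p WHERE event_date = current_date() ZORDER BY c1, c2
  val rawPredicate = "event_date = current_date()"
  // Deferred parsing: hand the raw text to a full Spark SQL expression parser,
  // as the patch does through `delegate.parseExpression` in buildOptimizeStatement.
  val expr = CatalystSqlParser.parseExpression(rawPredicate)
  println(expr) // unresolved: ('event_date = 'current_date())
}
```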
Closes #2893 from cfmcgrady/where-expression-support.
Closes #1265
97ac710f0 [Fu Chen] Merge remote-tracking branch 'apache/master' into where-expression-support
c188f0b3d [Fu Chen] fix style
e5f7409d6 [Fu Chen] move verifyPartitionPredicates to KyuubiSparkSQLAstBuilder
f7234abba [Fu Chen] fix style
95d314122 [Fu Chen] fork PredicateHelper.isLikelySelective
1e596e3dd [Fu Chen] partition predicates constraint
541e373cc [Fu Chen] fix
06d9efdf0 [Fu Chen] adapt to spark-3.1/spark-3.2 suite
867263673 [Fu Chen] fix style
b6801b279 [Fu Chen] add test case
79ab60554 [Fu Chen] fix suite bug
cf1b16ee7 [Fu Chen] fix style
dc0ebd908 [Fu Chen] add ut
286d94cc6 [Fu Chen] fix style
1736d18f6 [Fu Chen] adapt to spark-3.1/spark-3.2
04e88a5aa [Fu Chen] fix nep
59103095b [Fu Chen] simplify logical
59fba01e4 [Fu Chen] adapt to spark-3.1
e6477a9c5 [Fu Chen] remove unused
855283e20 [Fu Chen] where clause expression support
Authored-by: Fu Chen <[email protected]>
Signed-off-by: Fu Chen <[email protected]>
---
.../apache/kyuubi/sql/KyuubiSparkSQLParser.scala | 10 +-
.../scala/org/apache/spark/sql/ZorderSuite.scala | 18 +-
.../apache/kyuubi/sql/KyuubiSparkSQLParser.scala | 10 +-
.../scala/org/apache/spark/sql/ZorderSuite.scala | 18 +-
.../apache/kyuubi/sql/KyuubiSparkSQLParser.scala | 10 +-
.../scala/org/apache/spark/sql/ZorderSuite.scala | 21 +-
.../antlr4/org/apache/kyuubi/sql/KyuubiSparkSQL.g4 | 55 +--
.../kyuubi/sql/KyuubiSparkSQLAstBuilder.scala | 460 +++++----------------
.../sql/zorder/OptimizeZorderStatementBase.scala | 15 +-
.../kyuubi/sql/zorder/ResolveZorderBase.scala | 2 +-
.../org/apache/spark/sql/ZorderSuiteBase.scala | 103 ++++-
11 files changed, 267 insertions(+), 455 deletions(-)
diff --git a/extensions/spark/kyuubi-extension-spark-3-1/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala b/extensions/spark/kyuubi-extension-spark-3-1/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala
index 2f12a82e2..87c10bc34 100644
--- a/extensions/spark/kyuubi-extension-spark-3-1/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-1/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala
@@ -21,19 +21,21 @@ import org.antlr.v4.runtime._
import org.antlr.v4.runtime.atn.PredictionMode
import org.antlr.v4.runtime.misc.{Interval, ParseCancellationException}
import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, SQLConfHelper, TableIdentifier}
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.parser.{ParseErrorListener, ParseException, ParserInterface, PostProcessor}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.trees.Origin
import org.apache.spark.sql.types.{DataType, StructType}
-abstract class KyuubiSparkSQLParserBase extends ParserInterface {
+abstract class KyuubiSparkSQLParserBase extends ParserInterface with SQLConfHelper {
def delegate: ParserInterface
- def astBuilder: KyuubiSparkSQLAstBuilderBase
+ def astBuilder: KyuubiSparkSQLAstBuilder
override def parsePlan(sqlText: String): LogicalPlan = parse(sqlText) { parser =>
astBuilder.visit(parser.singleStatement()) match {
+ case optimize: UnparsedPredicateOptimize =>
+ astBuilder.buildOptimizeStatement(optimize, delegate.parseExpression)
case plan: LogicalPlan => plan
case _ => delegate.parsePlan(sqlText)
}
@@ -105,7 +107,7 @@ abstract class KyuubiSparkSQLParserBase extends ParserInterface {
class SparkKyuubiSparkSQLParser(
override val delegate: ParserInterface)
extends KyuubiSparkSQLParserBase {
- def astBuilder: KyuubiSparkSQLAstBuilderBase = new KyuubiSparkSQLAstBuilder
+ def astBuilder: KyuubiSparkSQLAstBuilder = new KyuubiSparkSQLAstBuilder
}
/* Copied from Apache Spark's to avoid dependency on Spark Internals */
diff --git a/extensions/spark/kyuubi-extension-spark-3-1/src/test/scala/org/apache/spark/sql/ZorderSuite.scala b/extensions/spark/kyuubi-extension-spark-3-1/src/test/scala/org/apache/spark/sql/ZorderSuite.scala
index fd04e27db..29a166abf 100644
--- a/extensions/spark/kyuubi-extension-spark-3-1/src/test/scala/org/apache/spark/sql/ZorderSuite.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-1/src/test/scala/org/apache/spark/sql/ZorderSuite.scala
@@ -17,6 +17,20 @@
package org.apache.spark.sql
-class ZorderWithCodegenEnabledSuite extends ZorderWithCodegenEnabledSuiteBase {}
+import org.apache.spark.sql.catalyst.parser.ParserInterface
-class ZorderWithCodegenDisabledSuite extends ZorderWithCodegenDisabledSuiteBase {}
+import org.apache.kyuubi.sql.SparkKyuubiSparkSQLParser
+
+trait ParserSuite { self: ZorderSuiteBase =>
+ override def createParser: ParserInterface = {
+ new SparkKyuubiSparkSQLParser(spark.sessionState.sqlParser)
+ }
+}
+
+class ZorderWithCodegenEnabledSuite
+ extends ZorderWithCodegenEnabledSuiteBase
+ with ParserSuite {}
+
+class ZorderWithCodegenDisabledSuite
+ extends ZorderWithCodegenDisabledSuiteBase
+ with ParserSuite {}
diff --git a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala b/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala
index 2f12a82e2..87c10bc34 100644
--- a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala
@@ -21,19 +21,21 @@ import org.antlr.v4.runtime._
import org.antlr.v4.runtime.atn.PredictionMode
import org.antlr.v4.runtime.misc.{Interval, ParseCancellationException}
import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, SQLConfHelper, TableIdentifier}
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.parser.{ParseErrorListener, ParseException, ParserInterface, PostProcessor}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.trees.Origin
import org.apache.spark.sql.types.{DataType, StructType}
-abstract class KyuubiSparkSQLParserBase extends ParserInterface {
+abstract class KyuubiSparkSQLParserBase extends ParserInterface with SQLConfHelper {
def delegate: ParserInterface
- def astBuilder: KyuubiSparkSQLAstBuilderBase
+ def astBuilder: KyuubiSparkSQLAstBuilder
override def parsePlan(sqlText: String): LogicalPlan = parse(sqlText) { parser =>
astBuilder.visit(parser.singleStatement()) match {
+ case optimize: UnparsedPredicateOptimize =>
+ astBuilder.buildOptimizeStatement(optimize, delegate.parseExpression)
case plan: LogicalPlan => plan
case _ => delegate.parsePlan(sqlText)
}
@@ -105,7 +107,7 @@ abstract class KyuubiSparkSQLParserBase extends ParserInterface {
class SparkKyuubiSparkSQLParser(
override val delegate: ParserInterface)
extends KyuubiSparkSQLParserBase {
- def astBuilder: KyuubiSparkSQLAstBuilderBase = new KyuubiSparkSQLAstBuilder
+ def astBuilder: KyuubiSparkSQLAstBuilder = new KyuubiSparkSQLAstBuilder
}
/* Copied from Apache Spark's to avoid dependency on Spark Internals */
diff --git a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/ZorderSuite.scala b/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/ZorderSuite.scala
index fd04e27db..29a166abf 100644
--- a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/ZorderSuite.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/ZorderSuite.scala
@@ -17,6 +17,20 @@
package org.apache.spark.sql
-class ZorderWithCodegenEnabledSuite extends ZorderWithCodegenEnabledSuiteBase {}
+import org.apache.spark.sql.catalyst.parser.ParserInterface
-class ZorderWithCodegenDisabledSuite extends ZorderWithCodegenDisabledSuiteBase {}
+import org.apache.kyuubi.sql.SparkKyuubiSparkSQLParser
+
+trait ParserSuite { self: ZorderSuiteBase =>
+ override def createParser: ParserInterface = {
+ new SparkKyuubiSparkSQLParser(spark.sessionState.sqlParser)
+ }
+}
+
+class ZorderWithCodegenEnabledSuite
+ extends ZorderWithCodegenEnabledSuiteBase
+ with ParserSuite {}
+
+class ZorderWithCodegenDisabledSuite
+ extends ZorderWithCodegenDisabledSuiteBase
+ with ParserSuite {}
diff --git a/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala
index af1711ebb..c4418c33c 100644
--- a/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala
@@ -21,19 +21,21 @@ import org.antlr.v4.runtime._
import org.antlr.v4.runtime.atn.PredictionMode
import org.antlr.v4.runtime.misc.{Interval, ParseCancellationException}
import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, SQLConfHelper, TableIdentifier}
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.parser.{ParseErrorListener, ParseException, ParserInterface, PostProcessor}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.trees.Origin
import org.apache.spark.sql.types.{DataType, StructType}
-abstract class KyuubiSparkSQLParserBase extends ParserInterface {
+abstract class KyuubiSparkSQLParserBase extends ParserInterface with SQLConfHelper {
def delegate: ParserInterface
- def astBuilder: KyuubiSparkSQLAstBuilderBase
+ def astBuilder: KyuubiSparkSQLAstBuilder
override def parsePlan(sqlText: String): LogicalPlan = parse(sqlText) { parser =>
astBuilder.visit(parser.singleStatement()) match {
+ case optimize: UnparsedPredicateOptimize =>
+ astBuilder.buildOptimizeStatement(optimize, delegate.parseExpression)
case plan: LogicalPlan => plan
case _ => delegate.parsePlan(sqlText)
}
@@ -113,7 +115,7 @@ abstract class KyuubiSparkSQLParserBase extends ParserInterface {
class SparkKyuubiSparkSQLParser(
override val delegate: ParserInterface)
extends KyuubiSparkSQLParserBase {
- def astBuilder: KyuubiSparkSQLAstBuilderBase = new KyuubiSparkSQLAstBuilder
+ def astBuilder: KyuubiSparkSQLAstBuilder = new KyuubiSparkSQLAstBuilder
}
/* Copied from Apache Spark's to avoid dependency on Spark Internals */
diff --git a/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/ZorderSuite.scala b/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/ZorderSuite.scala
index 90fc17e24..a08366f1d 100644
--- a/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/ZorderSuite.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/ZorderSuite.scala
@@ -17,13 +17,14 @@
package org.apache.spark.sql
+import org.apache.spark.sql.catalyst.parser.ParserInterface
import org.apache.spark.sql.catalyst.plans.logical.{RebalancePartitions, Sort}
import org.apache.spark.sql.internal.SQLConf
-import org.apache.kyuubi.sql.KyuubiSQLConf
+import org.apache.kyuubi.sql.{KyuubiSQLConf, SparkKyuubiSparkSQLParser}
import org.apache.kyuubi.sql.zorder.Zorder
-trait ZorderWithCodegenEnabledSuiteBase33 extends ZorderWithCodegenEnabledSuiteBase {
+trait ZorderSuiteSpark33 extends ZorderSuiteBase {
test("Add rebalance before zorder") {
Seq("true" -> false, "false" -> true).foreach { case (useOriginalOrdering,
zorder) =>
@@ -106,6 +107,18 @@ trait ZorderWithCodegenEnabledSuiteBase33 extends ZorderWithCodegenEnabledSuiteB
}
}
-class ZorderWithCodegenEnabledSuite extends ZorderWithCodegenEnabledSuiteBase33 {}
+trait ParserSuite { self: ZorderSuiteBase =>
+ override def createParser: ParserInterface = {
+ new SparkKyuubiSparkSQLParser(spark.sessionState.sqlParser)
+ }
+}
+
+class ZorderWithCodegenEnabledSuite
+ extends ZorderWithCodegenEnabledSuiteBase
+ with ZorderSuiteSpark33
+ with ParserSuite {}
-class ZorderWithCodegenDisabledSuite extends ZorderWithCodegenEnabledSuiteBase33 {}
+class ZorderWithCodegenDisabledSuite
+ extends ZorderWithCodegenDisabledSuiteBase
+ with ZorderSuiteSpark33
+ with ParserSuite {}
diff --git a/extensions/spark/kyuubi-extension-spark-common/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSparkSQL.g4 b/extensions/spark/kyuubi-extension-spark-common/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSparkSQL.g4
index 63e2bf848..e52b7f5cf 100644
--- a/extensions/spark/kyuubi-extension-spark-common/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSparkSQL.g4
+++ b/extensions/spark/kyuubi-extension-spark-common/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSparkSQL.g4
@@ -55,53 +55,23 @@ statement
;
whereClause
- : WHERE booleanExpression
+ : WHERE partitionPredicate = predicateToken
;
zorderClause
: ZORDER BY order+=multipartIdentifier (',' order+=multipartIdentifier)*
;
-booleanExpression
- : query #logicalQuery
- | left=booleanExpression operator=AND right=booleanExpression #logicalBinary
- | left=booleanExpression operator=OR right=booleanExpression #logicalBinary
- ;
-
-query
- : '('? multipartIdentifier comparisonOperator constant ')'?
- ;
-
-comparisonOperator
- : EQ | NEQ | NEQJ | LT | LTE | GT | GTE | NSEQ
- ;
-
-constant
- : NULL #nullLiteral
- | identifier STRING #typeConstructor
- | number #numericLiteral
- | booleanValue #booleanLiteral
- | STRING+ #stringLiteral
+// We don't have an expression rule in our grammar here, so we just grab the tokens and defer
+// parsing them to later.
+predicateToken
+ : .+?
;
multipartIdentifier
: parts+=identifier ('.' parts+=identifier)*
;
-booleanValue
- : TRUE | FALSE
- ;
-
-number
- : MINUS? DECIMAL_VALUE #decimalLiteral
- | MINUS? INTEGER_VALUE #integerLiteral
- | MINUS? BIGINT_LITERAL #bigIntLiteral
- | MINUS? SMALLINT_LITERAL #smallIntLiteral
- | MINUS? TINYINT_LITERAL #tinyIntLiteral
- | MINUS? DOUBLE_LITERAL #doubleLiteral
- | MINUS? BIGDECIMAL_LITERAL #bigDecimalLiteral
- ;
-
identifier
: strictIdentifier
;
@@ -136,7 +106,6 @@ BY: 'BY';
FALSE: 'FALSE';
DATE: 'DATE';
INTERVAL: 'INTERVAL';
-NULL: 'NULL';
OPTIMIZE: 'OPTIMIZE';
OR: 'OR';
TABLE: 'TABLE';
@@ -145,22 +114,8 @@ TRUE: 'TRUE';
WHERE: 'WHERE';
ZORDER: 'ZORDER';
-EQ : '=' | '==';
-NSEQ: '<=>';
-NEQ : '<>';
-NEQJ: '!=';
-LT : '<';
-LTE : '<=' | '!>';
-GT : '>';
-GTE : '>=' | '!<';
-
MINUS: '-';
-STRING
- : '\'' ( ~('\''|'\\') | ('\\' .) )* '\''
- | '"' ( ~('"'|'\\') | ('\\' .) )* '"'
- ;
-
BIGINT_LITERAL
: DIGIT+ 'L'
;
diff --git a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLAstBuilder.scala b/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLAstBuilder.scala
index 82e3e6da5..c937bd575 100644
--- a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLAstBuilder.scala
+++ b/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLAstBuilder.scala
@@ -17,37 +17,81 @@
package org.apache.kyuubi.sql
-import java.time.LocalDate
-import java.util.Locale
-
import scala.collection.JavaConverters.asScalaBufferConverter
-import scala.collection.mutable.{ArrayBuffer, ListBuffer}
-import scala.util.control.NonFatal
+import scala.collection.mutable.ListBuffer
import org.antlr.v4.runtime.ParserRuleContext
-import org.antlr.v4.runtime.tree.{ParseTree, TerminalNode}
-import org.apache.commons.codec.binary.Hex
-import org.apache.spark.sql.AnalysisException
+import org.antlr.v4.runtime.misc.Interval
+import org.antlr.v4.runtime.tree.ParseTree
+import org.apache.spark.sql.catalyst.SQLConfHelper
import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, UnresolvedRelation, UnresolvedStar}
import org.apache.spark.sql.catalyst.expressions._
-import org.apache.spark.sql.catalyst.parser.ParseException
-import org.apache.spark.sql.catalyst.parser.ParserUtils.{string, stringWithoutUnescape, withOrigin}
+import org.apache.spark.sql.catalyst.parser.ParserUtils.withOrigin
import org.apache.spark.sql.catalyst.plans.logical.{Filter, LogicalPlan, Project, Sort}
-import org.apache.spark.sql.catalyst.util.DateTimeUtils.{getZoneId, localDateToDays, stringToTimestamp}
-import org.apache.spark.sql.catalyst.util.IntervalUtils
-import org.apache.spark.sql.hive.HiveAnalysis.conf
-import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.types._
-import org.apache.spark.unsafe.types.UTF8String
import org.apache.kyuubi.sql.KyuubiSparkSQLParser._
-import org.apache.kyuubi.sql.zorder.{OptimizeZorderStatement, OptimizeZorderStatementBase, Zorder, ZorderBase}
+import org.apache.kyuubi.sql.zorder.{OptimizeZorderStatement, Zorder}
+
+class KyuubiSparkSQLAstBuilder extends KyuubiSparkSQLBaseVisitor[AnyRef] with SQLConfHelper {
+
+ def buildOptimizeStatement(
+ unparsedPredicateOptimize: UnparsedPredicateOptimize,
+ parseExpression: String => Expression): LogicalPlan = {
-abstract class KyuubiSparkSQLAstBuilderBase extends KyuubiSparkSQLBaseVisitor[AnyRef] {
- def buildZorder(child: Seq[Expression]): ZorderBase
- def buildOptimizeZorderStatement(
- tableIdentifier: Seq[String],
- query: LogicalPlan): OptimizeZorderStatementBase
+ val UnparsedPredicateOptimize(tableIdent, tablePredicate, orderExpr) =
+ unparsedPredicateOptimize
+
+ val predicate = tablePredicate.map(parseExpression)
+ verifyPartitionPredicates(predicate)
+ val table = UnresolvedRelation(tableIdent)
+ val tableWithFilter = predicate match {
+ case Some(expr) => Filter(expr, table)
+ case None => table
+ }
+ val query =
+ Sort(
+ SortOrder(orderExpr, Ascending, NullsLast, Seq.empty) :: Nil,
+ conf.getConf(KyuubiSQLConf.ZORDER_GLOBAL_SORT_ENABLED),
+ Project(Seq(UnresolvedStar(None)), tableWithFilter))
+ OptimizeZorderStatement(tableIdent, query)
+ }
+
+ private def verifyPartitionPredicates(predicates: Option[Expression]): Unit = {
+ predicates.foreach {
+ case p if !isLikelySelective(p) =>
+ throw new KyuubiSQLExtensionException(s"unsupported partition predicates: ${p.sql}")
+ case _ =>
+ }
+ }
+
+ /**
+ * Forked from Apache Spark's org.apache.spark.sql.catalyst.expressions.PredicateHelper
+ * The `PredicateHelper.isLikelySelective()` is available since Spark-3.3, forked for Spark
+ * that is lower than 3.3.
+ *
+ * Returns whether an expression is likely to be selective
+ */
+ private def isLikelySelective(e: Expression): Boolean = e match {
+ case Not(expr) => isLikelySelective(expr)
+ case And(l, r) => isLikelySelective(l) || isLikelySelective(r)
+ case Or(l, r) => isLikelySelective(l) && isLikelySelective(r)
+ case _: StringRegexExpression => true
+ case _: BinaryComparison => true
+ case _: In | _: InSet => true
+ case _: StringPredicate => true
+ case BinaryPredicate(_) => true
+ case _: MultiLikeBase => true
+ case _ => false
+ }
+
+ private object BinaryPredicate {
+ def unapply(expr: Expression): Option[Expression] = expr match {
+ case _: Contains => Option(expr)
+ case _: StartsWith => Option(expr)
+ case _: EndsWith => Option(expr)
+ case _ => None
+ }
+ }
/**
* Create an expression from the given context. This method just passes the context on to the
@@ -62,21 +106,12 @@ abstract class KyuubiSparkSQLAstBuilderBase extends KyuubiSparkSQLBaseVisitor[An
}
override def visitOptimizeZorder(
- ctx: OptimizeZorderContext): LogicalPlan = withOrigin(ctx) {
+ ctx: OptimizeZorderContext): UnparsedPredicateOptimize = withOrigin(ctx) {
val tableIdent = multiPart(ctx.multipartIdentifier())
- val table = UnresolvedRelation(tableIdent)
-
- val whereClause =
- if (ctx.whereClause() == null) {
- None
- } else {
- Option(expression(ctx.whereClause().booleanExpression()))
- }
- val tableWithFilter = whereClause match {
- case Some(expr) => Filter(expr, table)
- case None => table
- }
+ val predicate = Option(ctx.whereClause())
+ .map(_.partitionPredicate)
+ .map(extractRawText(_))
val zorderCols = ctx.zorderClause().order.asScala
.map(visitMultipartIdentifier)
@@ -86,83 +121,13 @@ abstract class KyuubiSparkSQLAstBuilderBase extends KyuubiSparkSQLBaseVisitor[An
if (zorderCols.length == 1) {
zorderCols.head
} else {
- buildZorder(zorderCols)
+ Zorder(zorderCols)
}
- val query =
- Sort(
- SortOrder(orderExpr, Ascending, NullsLast, Seq.empty) :: Nil,
- conf.getConf(KyuubiSQLConf.ZORDER_GLOBAL_SORT_ENABLED),
- Project(Seq(UnresolvedStar(None)), tableWithFilter))
-
- buildOptimizeZorderStatement(tableIdent, query)
+ UnparsedPredicateOptimize(tableIdent, predicate, orderExpr)
}
override def visitPassThrough(ctx: PassThroughContext): LogicalPlan = null
- override def visitQuery(ctx: QueryContext): Expression = withOrigin(ctx) {
- val left = new UnresolvedAttribute(multiPart(ctx.multipartIdentifier()))
- val right = expression(ctx.constant())
- val operator = ctx.comparisonOperator().getChild(0).asInstanceOf[TerminalNode]
- operator.getSymbol.getType match {
- case KyuubiSparkSQLParser.EQ =>
- EqualTo(left, right)
- case KyuubiSparkSQLParser.NSEQ =>
- EqualNullSafe(left, right)
- case KyuubiSparkSQLParser.NEQ | KyuubiSparkSQLParser.NEQJ =>
- Not(EqualTo(left, right))
- case KyuubiSparkSQLParser.LT =>
- LessThan(left, right)
- case KyuubiSparkSQLParser.LTE =>
- LessThanOrEqual(left, right)
- case KyuubiSparkSQLParser.GT =>
- GreaterThan(left, right)
- case KyuubiSparkSQLParser.GTE =>
- GreaterThanOrEqual(left, right)
- }
- }
-
- override def visitLogicalBinary(ctx: LogicalBinaryContext): Expression = withOrigin(ctx) {
- val expressionType = ctx.operator.getType
- val expressionCombiner = expressionType match {
- case KyuubiSparkSQLParser.AND => And.apply _
- case KyuubiSparkSQLParser.OR => Or.apply _
- }
-
- // Collect all similar left hand contexts.
- val contexts = ArrayBuffer(ctx.right)
- var current = ctx.left
- def collectContexts: Boolean = current match {
- case lbc: LogicalBinaryContext if lbc.operator.getType == expressionType =>
- contexts += lbc.right
- current = lbc.left
- true
- case _ =>
- contexts += current
- false
- }
- while (collectContexts) {
- // No body - all updates take place in the collectContexts.
- }
-
- // Reverse the contexts to have them in the same sequence as in the SQL statement & turn them
- // into expressions.
- val expressions = contexts.reverseMap(expression)
-
- // Create a balanced tree.
- def reduceToExpressionTree(low: Int, high: Int): Expression = high - low match {
- case 0 =>
- expressions(low)
- case 1 =>
- expressionCombiner(expressions(low), expressions(high))
- case x =>
- val mid = low + x / 2
- expressionCombiner(
- reduceToExpressionTree(low, mid),
- reduceToExpressionTree(mid + 1, high))
- }
- reduceToExpressionTree(0, expressions.size - 1)
- }
-
override def visitMultipartIdentifier(ctx: MultipartIdentifierContext): Seq[String] =
withOrigin(ctx) {
ctx.parts.asScala.map(_.getText)
@@ -177,273 +142,32 @@ abstract class KyuubiSparkSQLAstBuilderBase extends KyuubiSparkSQLBaseVisitor[An
res
}
- /**
- * Create a NULL literal expression.
- */
- override def visitNullLiteral(ctx: NullLiteralContext): Literal = withOrigin(ctx) {
- Literal(null)
- }
-
- /**
- * Create a Boolean literal expression.
- */
- override def visitBooleanLiteral(ctx: BooleanLiteralContext): Literal = withOrigin(ctx) {
- if (ctx.getText.toBoolean) {
- Literal.TrueLiteral
- } else {
- Literal.FalseLiteral
- }
- }
-
- /**
- * Create a typed Literal expression. A typed literal has the following SQL syntax:
- * {{{
- * [TYPE] '[VALUE]'
- * }}}
- * Currently Date, Timestamp, Interval and Binary typed literals are supported.
- */
- override def visitTypeConstructor(ctx: TypeConstructorContext): Literal = withOrigin(ctx) {
- val value = string(ctx.STRING)
- val valueType = ctx.identifier.getText.toUpperCase(Locale.ROOT)
-
- def toLiteral[T](f: UTF8String => Option[T], t: DataType): Literal = {
- f(UTF8String.fromString(value)).map(Literal(_, t)).getOrElse {
- throw new ParseException(s"Cannot parse the $valueType value: $value",
ctx)
- }
- }
- try {
- valueType match {
- case "DATE" =>
- toLiteral(stringToDate, DateType)
- case "TIMESTAMP" =>
- val zoneId = getZoneId(SQLConf.get.sessionLocalTimeZone)
- toLiteral(stringToTimestamp(_, zoneId), TimestampType)
- case "INTERVAL" =>
- val interval =
- try {
- IntervalUtils.stringToInterval(UTF8String.fromString(value))
- } catch {
- case e: IllegalArgumentException =>
- val ex = new ParseException("Cannot parse the INTERVAL value:
" + value, ctx)
- ex.setStackTrace(e.getStackTrace)
- throw ex
- }
- Literal(interval, CalendarIntervalType)
- case "X" =>
- val padding = if (value.length % 2 != 0) "0" else ""
-
- Literal(Hex.decodeHex(padding + value))
- case other =>
- throw new ParseException(s"Literals of type '$other' are currently
not supported.", ctx)
- }
- } catch {
- case e: IllegalArgumentException =>
- val message = Option(e.getMessage).getOrElse(s"Exception parsing $valueType")
- throw new ParseException(message, ctx)
- }
- }
-
- /**
- * Create a String literal expression.
- */
- override def visitStringLiteral(ctx: StringLiteralContext): Literal = withOrigin(ctx) {
- Literal(createString(ctx))
- }
-
- /**
- * Create a decimal literal for a regular decimal number.
- */
- override def visitDecimalLiteral(ctx: DecimalLiteralContext): Literal = withOrigin(ctx) {
- Literal(BigDecimal(ctx.getText).underlying())
- }
-
- /** Create a numeric literal expression. */
- private def numericLiteral(
- ctx: NumberContext,
- rawStrippedQualifier: String,
- minValue: BigDecimal,
- maxValue: BigDecimal,
- typeName: String)(converter: String => Any): Literal = withOrigin(ctx) {
- try {
- val rawBigDecimal = BigDecimal(rawStrippedQualifier)
- if (rawBigDecimal < minValue || rawBigDecimal > maxValue) {
- throw new ParseException(
- s"Numeric literal ${rawStrippedQualifier} does not " +
- s"fit in range [${minValue}, ${maxValue}] for type ${typeName}",
- ctx)
- }
- Literal(converter(rawStrippedQualifier))
- } catch {
- case e: NumberFormatException =>
- throw new ParseException(e.getMessage, ctx)
- }
- }
-
- /**
- * Create a Byte Literal expression.
- */
- override def visitTinyIntLiteral(ctx: TinyIntLiteralContext): Literal = {
- val rawStrippedQualifier = ctx.getText.substring(0, ctx.getText.length - 1)
- numericLiteral(
- ctx,
- rawStrippedQualifier,
- Byte.MinValue,
- Byte.MaxValue,
- ByteType.simpleString)(_.toByte)
- }
-
- /**
- * Create an integral literal expression. The code selects the most narrow integral type
- * possible, either a BigDecimal, a Long or an Integer is returned.
- */
- override def visitIntegerLiteral(ctx: IntegerLiteralContext): Literal = withOrigin(ctx) {
- BigDecimal(ctx.getText) match {
- case v if v.isValidInt =>
- Literal(v.intValue)
- case v if v.isValidLong =>
- Literal(v.longValue)
- case v => Literal(v.underlying())
- }
- }
-
- /**
- * Create a Short Literal expression.
- */
- override def visitSmallIntLiteral(ctx: SmallIntLiteralContext): Literal = {
- val rawStrippedQualifier = ctx.getText.substring(0, ctx.getText.length - 1)
- numericLiteral(
- ctx,
- rawStrippedQualifier,
- Short.MinValue,
- Short.MaxValue,
- ShortType.simpleString)(_.toShort)
- }
-
- /**
- * Create a Long Literal expression.
- */
- override def visitBigIntLiteral(ctx: BigIntLiteralContext): Literal = {
- val rawStrippedQualifier = ctx.getText.substring(0, ctx.getText.length - 1)
- numericLiteral(
- ctx,
- rawStrippedQualifier,
- Long.MinValue,
- Long.MaxValue,
- LongType.simpleString)(_.toLong)
- }
-
- /**
- * Create a Double Literal expression.
- */
- override def visitDoubleLiteral(ctx: DoubleLiteralContext): Literal = {
- val rawStrippedQualifier = ctx.getText.substring(0, ctx.getText.length - 1)
- numericLiteral(
- ctx,
- rawStrippedQualifier,
- Double.MinValue,
- Double.MaxValue,
- DoubleType.simpleString)(_.toDouble)
- }
-
- /**
- * Create a BigDecimal Literal expression.
- */
- override def visitBigDecimalLiteral(ctx: BigDecimalLiteralContext): Literal = {
- val raw = ctx.getText.substring(0, ctx.getText.length - 2)
- try {
- Literal(BigDecimal(raw).underlying())
- } catch {
- case e: AnalysisException =>
- throw new ParseException(e.message, ctx)
- }
- }
-
- /**
- * Create a String from a string literal context. This supports multiple consecutive string
- * literals, these are concatenated, for example this expression "'hello' 'world'" will be
- * converted into "helloworld".
- *
- * Special characters can be escaped by using Hive/C-style escaping.
- */
- private def createString(ctx: StringLiteralContext): String = {
- if (conf.escapedStringLiterals) {
- ctx.STRING().asScala.map(stringWithoutUnescape).mkString
- } else {
- ctx.STRING().asScala.map(string).mkString
- }
- }
-
private def typedVisit[T](ctx: ParseTree): T = {
ctx.accept(this).asInstanceOf[T]
}
- private def stringToDate(s: UTF8String): Option[Int] = {
- def isValidDigits(segment: Int, digits: Int): Boolean = {
- // An integer is able to represent a date within [+-]5 million years.
- val maxDigitsYear = 7
- (segment == 0 && digits >= 4 && digits <= maxDigitsYear) ||
- (segment != 0 && digits > 0 && digits <= 2)
- }
- if (s == null || s.trimAll().numBytes() == 0) {
- return None
- }
- val segments: Array[Int] = Array[Int](1, 1, 1)
- var sign = 1
- var i = 0
- var currentSegmentValue = 0
- var currentSegmentDigits = 0
- val bytes = s.trimAll().getBytes
- var j = 0
- if (bytes(j) == '-' || bytes(j) == '+') {
- sign = if (bytes(j) == '-') -1 else 1
- j += 1
- }
- while (j < bytes.length && (i < 3 && !(bytes(j) == ' ' || bytes(j) == 'T'))) {
- val b = bytes(j)
- if (i < 2 && b == '-') {
- if (!isValidDigits(i, currentSegmentDigits)) {
- return None
- }
- segments(i) = currentSegmentValue
- currentSegmentValue = 0
- currentSegmentDigits = 0
- i += 1
- } else {
- val parsedValue = b - '0'.toByte
- if (parsedValue < 0 || parsedValue > 9) {
- return None
- } else {
- currentSegmentValue = currentSegmentValue * 10 + parsedValue
- currentSegmentDigits += 1
- }
- }
- j += 1
- }
- if (!isValidDigits(i, currentSegmentDigits)) {
- return None
- }
- if (i < 2 && j < bytes.length) {
- // For the `yyyy` and `yyyy-[m]m` formats, entire input must be consumed.
- return None
- }
- segments(i) = currentSegmentValue
- try {
- val localDate = LocalDate.of(sign * segments(0), segments(1), segments(2))
- Some(localDateToDays(localDate))
- } catch {
- case NonFatal(_) => None
- }
+ private def extractRawText(exprContext: ParserRuleContext): String = {
+ // Extract the raw expression which will be parsed later
+ exprContext.getStart.getInputStream.getText(new Interval(
+ exprContext.getStart.getStartIndex,
+ exprContext.getStop.getStopIndex))
}
}
-class KyuubiSparkSQLAstBuilder extends KyuubiSparkSQLAstBuilderBase {
- override def buildZorder(child: Seq[Expression]): ZorderBase = {
- Zorder(child)
- }
+/**
+ * a logical plan contains an unparsed expression that will be parsed by spark.
+ */
+trait UnparsedExpressionLogicalPlan extends LogicalPlan {
+ override def output: Seq[Attribute] = throw new UnsupportedOperationException()
- override def buildOptimizeZorderStatement(
- tableIdentifier: Seq[String],
- query: LogicalPlan): OptimizeZorderStatementBase = {
- OptimizeZorderStatement(tableIdentifier, query)
- }
+ override def children: Seq[LogicalPlan] = throw new UnsupportedOperationException()
+
+ protected def withNewChildrenInternal(
+ newChildren: IndexedSeq[LogicalPlan]): LogicalPlan =
+ throw new UnsupportedOperationException()
}
+
+case class UnparsedPredicateOptimize(
+ tableIdent: Seq[String],
+ tablePredicate: Option[String],
+ orderExpr: Expression) extends UnparsedExpressionLogicalPlan {}
diff --git a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/OptimizeZorderStatementBase.scala b/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/OptimizeZorderStatementBase.scala
index a9bb5a5d7..895f9e24b 100644
--- a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/OptimizeZorderStatementBase.scala
+++ b/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/OptimizeZorderStatementBase.scala
@@ -20,24 +20,15 @@ package org.apache.kyuubi.sql.zorder
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, UnaryNode}
-/**
- * A zorder statement that contains we parsed from SQL.
- * We should convert this plan to certain command at Analyzer.
- */
-abstract class OptimizeZorderStatementBase extends UnaryNode {
- def tableIdentifier: Seq[String]
- def query: LogicalPlan
- override def child: LogicalPlan = query
- override def output: Seq[Attribute] = child.output
-}
-
/**
* A zorder statement that contains we parsed from SQL.
* We should convert this plan to certain command at Analyzer.
*/
case class OptimizeZorderStatement(
tableIdentifier: Seq[String],
- query: LogicalPlan) extends OptimizeZorderStatementBase {
+ query: LogicalPlan) extends UnaryNode {
+ override def child: LogicalPlan = query
+ override def output: Seq[Attribute] = child.output
protected def withNewChildInternal(newChild: LogicalPlan): LogicalPlan =
copy(query = newChild)
}
diff --git a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/ResolveZorderBase.scala b/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/ResolveZorderBase.scala
index cdead0b06..9f735caa7 100644
--- a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/ResolveZorderBase.scala
+++ b/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/ResolveZorderBase.scala
@@ -57,7 +57,7 @@ abstract class ResolveZorderBase extends Rule[LogicalPlan] {
}
override def apply(plan: LogicalPlan): LogicalPlan = plan match {
- case statement: OptimizeZorderStatementBase if statement.query.resolved =>
+ case statement: OptimizeZorderStatement if statement.query.resolved =>
checkQueryAllowed(statement.query)
val tableIdentifier = getTableIdentifier(statement.tableIdentifier)
val catalogTable = session.sessionState.catalog.getTableMetadata(tableIdentifier)
diff --git a/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala b/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
index f48d11e15..e0f86f85d 100644
--- a/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
+++ b/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
@@ -18,9 +18,11 @@
package org.apache.spark.sql
import org.apache.spark.SparkConf
-import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.expressions.{Alias, Ascending, AttributeReference, Expression, ExpressionEvalHelper, Literal, NullsLast, SortOrder}
-import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, OneRowRelation, Project, Sort}
+import org.apache.spark.sql.catalyst.{InternalRow, TableIdentifier}
+import org.apache.spark.sql.catalyst.analysis.{UnresolvedAttribute, UnresolvedFunction, UnresolvedRelation, UnresolvedStar}
+import org.apache.spark.sql.catalyst.expressions.{Alias, Ascending, AttributeReference, EqualTo, Expression, ExpressionEvalHelper, Literal, NullsLast, SortOrder}
+import org.apache.spark.sql.catalyst.parser.{ParseException, ParserInterface}
+import org.apache.spark.sql.catalyst.plans.logical.{Filter, LogicalPlan, OneRowRelation, Project, Sort}
import org.apache.spark.sql.execution.command.CreateDataSourceTableAsSelectCommand
import org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand
import org.apache.spark.sql.functions._
@@ -29,7 +31,7 @@ import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf}
import org.apache.spark.sql.types._
import org.apache.kyuubi.sql.{KyuubiSQLConf, KyuubiSQLExtensionException}
-import org.apache.kyuubi.sql.zorder.{OptimizeZorderCommandBase, Zorder, ZorderBytesUtils}
+import org.apache.kyuubi.sql.zorder.{OptimizeZorderCommandBase, OptimizeZorderStatement, Zorder, ZorderBytesUtils}
trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHelper {
override def sparkConf(): SparkConf = {
@@ -654,6 +656,99 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest with ExpressionEvalHel
ZorderBytesUtils.interleaveBitsDefault(inputs.map(ZorderBytesUtils.toByteArray).toArray)))
}
}
+
+ test("OPTIMIZE command is parsed as expected") {
+ val parser = createParser
+ val globalSort = spark.conf.get(KyuubiSQLConf.ZORDER_GLOBAL_SORT_ENABLED)
+
+ assert(parser.parsePlan("OPTIMIZE p zorder by c1") ===
+ OptimizeZorderStatement(
+ Seq("p"),
+ Sort(
+ SortOrder(UnresolvedAttribute("c1"), Ascending, NullsLast,
Seq.empty) :: Nil,
+ globalSort,
+ Project(Seq(UnresolvedStar(None)),
UnresolvedRelation(TableIdentifier("p"))))))
+
+ assert(parser.parsePlan("OPTIMIZE p zorder by c1, c2") ===
+ OptimizeZorderStatement(
+ Seq("p"),
+ Sort(
+ SortOrder(
+ Zorder(Seq(UnresolvedAttribute("c1"), UnresolvedAttribute("c2"))),
+ Ascending,
+ NullsLast,
+ Seq.empty) :: Nil,
+ globalSort,
+ Project(Seq(UnresolvedStar(None)), UnresolvedRelation(TableIdentifier("p"))))))
+
+ assert(parser.parsePlan("OPTIMIZE p where id = 1 zorder by c1") ===
+ OptimizeZorderStatement(
+ Seq("p"),
+ Sort(
+ SortOrder(UnresolvedAttribute("c1"), Ascending, NullsLast,
Seq.empty) :: Nil,
+ globalSort,
+ Project(
+ Seq(UnresolvedStar(None)),
+ Filter(
+ EqualTo(UnresolvedAttribute("id"), Literal(1)),
+ UnresolvedRelation(TableIdentifier("p")))))))
+
+ assert(parser.parsePlan("OPTIMIZE p where id = 1 zorder by c1, c2") ===
+ OptimizeZorderStatement(
+ Seq("p"),
+ Sort(
+ SortOrder(
+ Zorder(Seq(UnresolvedAttribute("c1"), UnresolvedAttribute("c2"))),
+ Ascending,
+ NullsLast,
+ Seq.empty) :: Nil,
+ globalSort,
+ Project(
+ Seq(UnresolvedStar(None)),
+ Filter(
+ EqualTo(UnresolvedAttribute("id"), Literal(1)),
+ UnresolvedRelation(TableIdentifier("p")))))))
+
+ assert(parser.parsePlan("OPTIMIZE p where id = current_date() zorder by
c1") ===
+ OptimizeZorderStatement(
+ Seq("p"),
+ Sort(
+ SortOrder(UnresolvedAttribute("c1"), Ascending, NullsLast,
Seq.empty) :: Nil,
+ globalSort,
+ Project(
+ Seq(UnresolvedStar(None)),
+ Filter(
+ EqualTo(
+ UnresolvedAttribute("id"),
+ UnresolvedFunction("current_date", Seq.empty, false)),
+ UnresolvedRelation(TableIdentifier("p")))))))
+
+ // TODO: add following case support
+ intercept[ParseException] {
+ parser.parsePlan("OPTIMIZE p zorder by (c1)")
+ }
+
+ intercept[ParseException] {
+ parser.parsePlan("OPTIMIZE p zorder by (c1, c2)")
+ }
+ }
+
+ test("OPTIMIZE partition predicates constraint") {
+ withTable("p") {
+ sql("CREATE TABLE p (c1 INT, c2 INT) PARTITIONED BY (event_date DATE)")
+ val e1 = intercept[KyuubiSQLExtensionException] {
+ sql("OPTIMIZE p WHERE event_date = current_date as c ZORDER BY c1, c2")
+ }
+ assert(e1.getMessage.contains("unsupported partition predicates"))
+
+ val e2 = intercept[KyuubiSQLExtensionException] {
+ sql("OPTIMIZE p WHERE c1 = 1 ZORDER BY c1, c2")
+ }
+ assert(e2.getMessage == "Only partition column filters are allowed")
+ }
+ }
+
+ def createParser: ParserInterface
}
trait ZorderWithCodegenEnabledSuiteBase extends ZorderSuiteBase {