This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
     new 099fe97  [SPARK-38626][SQL] Make condition in DeleteFromTable plan required
099fe97 is described below

commit 099fe9717d8e1317e1001c0409720049abd95fa3
Author: Anton Okolnychyi <aokolnyc...@apple.com>
AuthorDate: Wed Mar 23 09:11:48 2022 +0800

    [SPARK-38626][SQL] Make condition in DeleteFromTable plan required
    
    ### What changes were proposed in this pull request?
    
    This PR makes the condition in `DeleteFromTable` required. Right now, the condition is optional and `None` is equivalent to a true literal. As a consequence, rules that handle such statements have to match two different yet equivalent representations, which makes those rules more complex. Instead, we can default a missing condition to a true literal during parsing and make the condition required.
    
    This change has been discussed and reviewed [here](https://github.com/apache/spark/pull/35395#discussion_r815234852).

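    To make the simplification concrete, here is a minimal standalone sketch (the `Expr`, `DeleteBefore`/`DeleteAfter` case classes and `rewrite` helpers are invented for illustration; they are not Spark's actual `DeleteFromTable` API):

    ```scala
    object ConditionSketch {
      sealed trait Expr
      case object TrueLiteral extends Expr

      // Before: the condition is optional, so None and Some(TrueLiteral)
      // are two spellings of the same "delete everything" plan.
      case class DeleteBefore(table: String, condition: Option[Expr])

      def rewriteBefore(d: DeleteBefore)(f: Expr => Expr): DeleteBefore =
        d.condition match {
          case Some(cond) => d.copy(condition = Some(f(cond)))
          case None => d // every rule must remember this extra branch
        }

      // After: parsing defaults a missing WHERE clause to TrueLiteral,
      // so every rule matches exactly one shape.
      case class DeleteAfter(table: String, condition: Expr)

      def rewriteAfter(d: DeleteAfter)(f: Expr => Expr): DeleteAfter =
        d.copy(condition = f(d.condition))
    }
    ```

    With the required form, `rewriteAfter` needs no separate branch for a missing condition, which is the same one-case pattern this diff applies to `ReplaceNullWithFalseInPredicate` and `SimplifyConditionalsInPredicate`.
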
    ### Why are the changes needed?
    
    These changes are needed to simplify rules that handle `DeleteFromTable` plans.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Existing tests.
    
    Closes #35941 from aokolnychyi/spark-38626.
    
    Authored-by: Anton Okolnychyi <aokolnyc...@apple.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
    (cherry picked from commit a89d2897ec803c0c272d4812420f2741880c9612)
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../sql/catalyst/optimizer/ReplaceNullWithFalseInPredicate.scala  | 2 +-
 .../sql/catalyst/optimizer/SimplifyConditionalsInPredicate.scala  | 2 +-
 .../scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala   | 4 ++--
 .../org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala  | 2 +-
 .../org/apache/spark/sql/errors/QueryCompilationErrors.scala      | 2 +-
 .../sql/catalyst/optimizer/PullupCorrelatedPredicatesSuite.scala  | 4 ++--
 .../catalyst/optimizer/ReplaceNullWithFalseInPredicateSuite.scala | 2 +-
 .../catalyst/optimizer/SimplifyConditionalsInPredicateSuite.scala | 2 +-
 .../org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala     | 4 ++--
 .../spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala | 4 ++--
 .../apache/spark/sql/execution/command/PlanResolutionSuite.scala  | 8 ++++----
 11 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/ReplaceNullWithFalseInPredicate.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/ReplaceNullWithFalseInPredicate.scala
index 3de19af..9ec498a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/ReplaceNullWithFalseInPredicate.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/ReplaceNullWithFalseInPredicate.scala
@@ -54,7 +54,7 @@ object ReplaceNullWithFalseInPredicate extends Rule[LogicalPlan] {
     _.containsAnyPattern(NULL_LITERAL, TRUE_OR_FALSE_LITERAL, INSET), ruleId) {
     case f @ Filter(cond, _) => f.copy(condition = replaceNullWithFalse(cond))
     case j @ Join(_, _, _, Some(cond), _) => j.copy(condition = Some(replaceNullWithFalse(cond)))
-    case d @ DeleteFromTable(_, Some(cond)) => d.copy(condition = Some(replaceNullWithFalse(cond)))
+    case d @ DeleteFromTable(_, cond) => d.copy(condition = replaceNullWithFalse(cond))
     case u @ UpdateTable(_, _, Some(cond)) => u.copy(condition = Some(replaceNullWithFalse(cond)))
     case m @ MergeIntoTable(_, _, mergeCond, matchedActions, notMatchedActions) =>
       m.copy(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalsInPredicate.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalsInPredicate.scala
index c08bcbe..e1972b9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalsInPredicate.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalsInPredicate.scala
@@ -48,7 +48,7 @@ object SimplifyConditionalsInPredicate extends Rule[LogicalPlan] {
     _.containsAnyPattern(CASE_WHEN, IF), ruleId) {
     case f @ Filter(cond, _) => f.copy(condition = simplifyConditional(cond))
     case j @ Join(_, _, _, Some(cond), _) => j.copy(condition = Some(simplifyConditional(cond)))
-    case d @ DeleteFromTable(_, Some(cond)) => d.copy(condition = Some(simplifyConditional(cond)))
+    case d @ DeleteFromTable(_, cond) => d.copy(condition = simplifyConditional(cond))
     case u @ UpdateTable(_, _, Some(cond)) => u.copy(condition = Some(simplifyConditional(cond)))
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 3c8f077..9266388 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -365,9 +365,9 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit
     val tableAlias = getTableAliasWithoutColumnAlias(ctx.tableAlias(), "DELETE")
     val aliasedTable = tableAlias.map(SubqueryAlias(_, table)).getOrElse(table)
     val predicate = if (ctx.whereClause() != null) {
-      Some(expression(ctx.whereClause().booleanExpression()))
+      expression(ctx.whereClause().booleanExpression())
     } else {
-      None
+      Literal.TrueLiteral
     }
     DeleteFromTable(aliasedTable, predicate)
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
index 45465b0..b2ca346 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
@@ -451,7 +451,7 @@ object DescribeColumn {
  */
 case class DeleteFromTable(
     table: LogicalPlan,
-    condition: Option[Expression]) extends UnaryCommand with SupportsSubquery {
+    condition: Expression) extends UnaryCommand with SupportsSubquery {
   override def child: LogicalPlan = table
   override protected def withNewChildInternal(newChild: LogicalPlan): DeleteFromTable =
     copy(table = newChild)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 6bf0ec8..57ed7da 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -775,7 +775,7 @@ object QueryCompilationErrors {
         s"$v2WriteClassName is not an instance of $v1WriteClassName")
   }
 
-  def unsupportedDeleteByConditionWithSubqueryError(condition: Option[Expression]): Throwable = {
+  def unsupportedDeleteByConditionWithSubqueryError(condition: Expression): Throwable = {
     new AnalysisException(
       s"Delete by condition with subquery is not supported: $condition")
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/PullupCorrelatedPredicatesSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/PullupCorrelatedPredicatesSuite.scala
index c4b052b..3ffbb49 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/PullupCorrelatedPredicatesSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/PullupCorrelatedPredicatesSuite.scala
@@ -114,14 +114,14 @@ class PullupCorrelatedPredicatesSuite extends PlanTest {
   test("PullupCorrelatedPredicates should handle deletes") {
     val subPlan = testRelation2.where('a === 'c).select('c)
     val cond = InSubquery(Seq('a), ListQuery(subPlan))
-    val deletePlan = DeleteFromTable(testRelation, Some(cond)).analyze
+    val deletePlan = DeleteFromTable(testRelation, cond).analyze
     assert(deletePlan.resolved)
 
     val optimized = Optimize.execute(deletePlan)
     assert(optimized.resolved)
 
     optimized match {
-      case DeleteFromTable(_, Some(s: InSubquery)) =>
+      case DeleteFromTable(_, s: InSubquery) =>
         val outerRefs = SubExprUtils.getOuterReferences(s.query.plan)
         assert(outerRefs.isEmpty, "should be no outer refs")
       case other =>
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ReplaceNullWithFalseInPredicateSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ReplaceNullWithFalseInPredicateSuite.scala
index e65174b..57698d1 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ReplaceNullWithFalseInPredicateSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ReplaceNullWithFalseInPredicateSuite.scala
@@ -461,7 +461,7 @@ class ReplaceNullWithFalseInPredicateSuite extends PlanTest {
   }
 
   private def testDelete(originalCond: Expression, expectedCond: Expression): Unit = {
-    test((rel, expr) => DeleteFromTable(rel, Some(expr)), originalCond, expectedCond)
+    test((rel, expr) => DeleteFromTable(rel, expr), originalCond, expectedCond)
   }
 
   private def testUpdate(originalCond: Expression, expectedCond: Expression): Unit = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalsInPredicateSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalsInPredicateSuite.scala
index 79db53e..bb6ca54 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalsInPredicateSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyConditionalsInPredicateSuite.scala
@@ -229,7 +229,7 @@ class SimplifyConditionalsInPredicateSuite extends PlanTest {
   }
 
   private def testDelete(originalCond: Expression, expectedCond: Expression): Unit = {
-    test((rel, expr) => DeleteFromTable(rel, Some(expr)), originalCond, expectedCond)
+    test((rel, expr) => DeleteFromTable(rel, expr), originalCond, expectedCond)
   }
 
   private def testUpdate(originalCond: Expression, expectedCond: Expression): Unit = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index d5d90ce..472506f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -1369,14 +1369,14 @@ class DDLParserSuite extends AnalysisTest {
     parseCompare("DELETE FROM testcat.ns1.ns2.tbl",
       DeleteFromTable(
         UnresolvedRelation(Seq("testcat", "ns1", "ns2", "tbl")),
-        None))
+        Literal.TrueLiteral))
   }
 
   test("delete from table: with alias and where clause") {
     parseCompare("DELETE FROM testcat.ns1.ns2.tbl AS t WHERE t.a = 2",
       DeleteFromTable(
         SubqueryAlias("t", UnresolvedRelation(Seq("testcat", "ns1", "ns2", 
"tbl"))),
-        Some(EqualTo(UnresolvedAttribute("t.a"), Literal(2)))))
+        EqualTo(UnresolvedAttribute("t.a"), Literal(2))))
   }
 
   test("delete from table: columns aliases is not allowed") {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
index c6d271b..c0b00a4 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
@@ -257,12 +257,12 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with Predicat
       relation match {
         case DataSourceV2ScanRelation(r, _, output) =>
           val table = r.table
-          if (condition.exists(SubqueryExpression.hasSubquery)) {
+          if (SubqueryExpression.hasSubquery(condition)) {
            throw QueryCompilationErrors.unsupportedDeleteByConditionWithSubqueryError(condition)
           }
           // fail if any filter cannot be converted.
           // correctness depends on removing all matching data.
-          val filters = DataSourceStrategy.normalizeExprs(condition.toSeq, output)
+          val filters = DataSourceStrategy.normalizeExprs(Seq(condition), output)
               .flatMap(splitConjunctivePredicates(_).map {
                 f => DataSourceStrategy.translateFilter(f, true).getOrElse(
                  throw QueryCompilationErrors.cannotTranslateExpressionToSourceFilterError(f))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
index 6cfdbdd..24b6be0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
@@ -922,14 +922,14 @@ class PlanResolutionSuite extends AnalysisTest {
       val parsed4 = parseAndResolve(sql4)
 
       parsed1 match {
-        case DeleteFromTable(AsDataSourceV2Relation(_), None) =>
+        case DeleteFromTable(AsDataSourceV2Relation(_), Literal.TrueLiteral) =>
        case _ => fail("Expect DeleteFromTable, but got:\n" + parsed1.treeString)
       }
 
       parsed2 match {
         case DeleteFromTable(
           AsDataSourceV2Relation(_),
-          Some(EqualTo(name: UnresolvedAttribute, StringLiteral("Robert")))) =>
+          EqualTo(name: UnresolvedAttribute, StringLiteral("Robert"))) =>
           assert(name.name == "name")
        case _ => fail("Expect DeleteFromTable, but got:\n" + parsed2.treeString)
       }
@@ -937,7 +937,7 @@ class PlanResolutionSuite extends AnalysisTest {
       parsed3 match {
         case DeleteFromTable(
          SubqueryAlias(AliasIdentifier("t", Seq()), AsDataSourceV2Relation(_)),
-          Some(EqualTo(name: UnresolvedAttribute, StringLiteral("Robert")))) =>
+          EqualTo(name: UnresolvedAttribute, StringLiteral("Robert"))) =>
           assert(name.name == "t.name")
        case _ => fail("Expect DeleteFromTable, but got:\n" + parsed3.treeString)
       }
@@ -945,7 +945,7 @@ class PlanResolutionSuite extends AnalysisTest {
       parsed4 match {
         case DeleteFromTable(
            SubqueryAlias(AliasIdentifier("t", Seq()), AsDataSourceV2Relation(_)),
-            Some(InSubquery(values, query))) =>
+            InSubquery(values, query)) =>
          assert(values.size == 1 && values.head.isInstanceOf[UnresolvedAttribute])
          assert(values.head.asInstanceOf[UnresolvedAttribute].name == "t.name")
           query match {
