aokolnychyi commented on a change in pull request #35395:
URL: https://github.com/apache/spark/pull/35395#discussion_r813117876
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
##########
@@ -446,17 +493,49 @@ object DescribeColumn {
   def getOutputAttrs: Seq[Attribute] =
     DescribeCommandSchema.describeColumnAttributes()
 }
+trait RowLevelCommand extends Command with SupportsSubquery {
+  def condition: Option[Expression]
+  def rewritePlan: Option[LogicalPlan]
+  def withNewRewritePlan(newRewritePlan: LogicalPlan): RowLevelCommand
+}
+
 /**
  * The logical plan of the DELETE FROM command.
  */
 case class DeleteFromTable(
     table: LogicalPlan,
-    condition: Option[Expression]) extends UnaryCommand with SupportsSubquery {
-  override def child: LogicalPlan = table
-  override protected def withNewChildInternal(newChild: LogicalPlan): DeleteFromTable =
-    copy(table = newChild)
+    condition: Option[Expression],
+    rewritePlan: Option[LogicalPlan] = None) extends RowLevelCommand {
+
+  override def children: Seq[LogicalPlan] = if (rewritePlan.isDefined) {
+    table :: rewritePlan.get :: Nil
+  } else {
+    table :: Nil
+  }
+
+  override def withNewRewritePlan(newRewritePlan: LogicalPlan): RowLevelCommand = {
+    copy(rewritePlan = Some(newRewritePlan))
+  }
+
+  override protected def withNewChildrenInternal(
+      newChildren: IndexedSeq[LogicalPlan]): DeleteFromTable = {
+    if (newChildren.size == 1) {
+      copy(table = newChildren.head, rewritePlan = None)
+    } else {
+      require(newChildren.size == 2, "DeleteFromTable expects either one or two children")
+      val Seq(newTable, newRewritePlan) = newChildren.take(2)
+      copy(table = newTable, rewritePlan = Some(newRewritePlan))
+    }
+  }
 }
+
+/**
+ * The logical plan of the DELETE FROM command that can be executed using data source filters.
+ */
+case class DeleteFromTableWithFilters(
+    table: LogicalPlan,
+    condition: Seq[sources.Filter]) extends LeafCommand
Review comment:
@cloud-fan, `DeleteFromTableWithFilters` is an optimization for `SupportsRowLevelOperations`; existing filter-based deletes would be unaffected. That said, I am going to combine the existing logic in `DataSourceV2Strategy` with the optimizer rule I added, as discussed [here](https://github.com/apache/spark/pull/35395#discussion_r805292918), so that the filter conversion logic lives in a single place. Let me know if you agree with that.
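
Roughly, the combined approach could look like the sketch below: a single optimizer rule that tries to translate each conjunct of the delete condition into a v1 `sources.Filter` and rewrites to `DeleteFromTableWithFilters` only when every conjunct translates. This is an illustration, not the rule in this PR; the name `OptimizeDeleteWithFilters` is made up, and it assumes `DataSourceStrategy.translateFilter` from sql/core is accessible from where the rule lives.

```scala
import org.apache.spark.sql.catalyst.expressions.PredicateHelper
import org.apache.spark.sql.catalyst.plans.logical.{DeleteFromTable, DeleteFromTableWithFilters, LogicalPlan}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.datasources.DataSourceStrategy

// Hypothetical rule name; a sketch of combining the two code paths, not the PR's actual rule.
object OptimizeDeleteWithFilters extends Rule[LogicalPlan] with PredicateHelper {

  override def apply(plan: LogicalPlan): LogicalPlan = plan transform {
    // Only consider deletes that still have a condition and no row-level rewrite plan.
    case d @ DeleteFromTable(table, Some(cond), None) =>
      val predicates = splitConjunctivePredicates(cond)
      // Try to translate each conjunct into a v1 data source filter.
      val filters = predicates.flatMap { p =>
        DataSourceStrategy.translateFilter(p, supportNestedPredicatePushdown = true)
      }
      if (filters.size == predicates.size) {
        // Every conjunct translated: the delete can be executed with filters alone.
        DeleteFromTableWithFilters(table, filters)
      } else {
        // Otherwise keep the original plan so the row-level rewrite can handle it.
        d
      }
  }
}
```

With the translation done once in the optimizer like this, `DataSourceV2Strategy` would no longer need its own copy of the filter conversion.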