aokolnychyi commented on a change in pull request #2132:
URL: https://github.com/apache/iceberg/pull/2132#discussion_r562258176
##########
File path: spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/optimizer/RewriteMergeInto.scala
##########
@@ -133,16 +133,17 @@ case class RewriteMergeInto(spark: SparkSession) extends Rule[LogicalPlan] with
     case MergeIntoTable(target: DataSourceV2Relation, source: LogicalPlan, cond, matchedActions, notMatchedActions) =>
-      val (mergeBuilder, targetTableScan) = buildDynamicFilterTargetScan(target, source, cond, matchedActions)
+      val mergeBuilder = target.table.asMergeable.newMergeBuilder("merge", newWriteInfo(target.schema))
       // rewrite the matched actions to ensure there is always an action to produce the output row
       val (matchedConditions, matchedOutputs) = rewriteMatchedActions(matchedActions, target.output)
       // use a full outer join because there are both matched and not matched actions
       val sourceTableProj = source.output ++ Seq(Alias(TRUE_LITERAL, ROW_FROM_SOURCE)())
+      val newSourceTableScan = Project(sourceTableProj, source)
+      val targetTableScan = buildDynamicFilterTargetScan(mergeBuilder, target, source, cond, matchedActions)
       val targetTableProj = targetTableScan.output ++ Seq(Alias(TRUE_LITERAL, ROW_FROM_TARGET)())
       val newTargetTableScan = Project(targetTableProj, targetTableScan)
-      val newSourceTableScan = Project(sourceTableProj, source)
Review comment:
Grouped source and target plans.
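
For context, a minimal sketch (not the PR's actual code) of what the two tagged plans feed into: each scan is wrapped in a Project that tags its rows with a TRUE literal, and the rule then combines them with a full outer join so that both matched and not-matched rows are visible downstream. The helper name joinTaggedScans is illustrative only; in the rule itself the join is built inline.

    import org.apache.spark.sql.catalyst.expressions.Expression
    import org.apache.spark.sql.catalyst.plans.FullOuter
    import org.apache.spark.sql.catalyst.plans.logical.{Join, JoinHint, LogicalPlan}

    // Illustrative helper only: combines the tagged source and target scans.
    def joinTaggedScans(
        newSourceTableScan: LogicalPlan,
        newTargetTableScan: LogicalPlan,
        cond: Expression): LogicalPlan = {
      // A full outer join preserves unmatched rows from both sides:
      // source-only rows drive the NOT MATCHED actions, while target-only
      // rows are carried through unchanged when files are rewritten.
      Join(newSourceTableScan, newTargetTableScan, FullOuter, Some(cond), JoinHint.NONE)
    }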