leanken commented on a change in pull request #29104:
URL: https://github.com/apache/spark/pull/29104#discussion_r459815126
##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastHashJoinExec.scala
##########
@@ -454,6 +491,48 @@ case class BroadcastHashJoinExec(
val (matched, checkCondition, _) = getJoinCondition(ctx, input)
val numOutput = metricTerm(ctx, "numOutputRows")
+ // Fast stop: if isOriginalInputEmpty is true, accept all rows from the streamed side
+ if (broadcastRelation.value.isOriginalInputEmpty) {
+ return s"""
+ |// Anti Join isOriginalInputEmpty(true) accept all
+ |$numOutput.add(1);
+ |${consume(ctx, input)}
+ """.stripMargin
+ }
+
+ if (isNullAwareAntiJoin) {
+ if (broadcastRelation.value.allNullColumnKeyExistsInOriginalInput) {
+ return s"""
+ |// NAAJ
Review comment:
```
/* 026 */   protected void processNext() throws java.io.IOException {
/* 027 */     while ( localtablescan_input_0.hasNext()) {
/* 028 */       InternalRow localtablescan_row_0 = (InternalRow) localtablescan_input_0.next();
/* 029 */       ((org.apache.spark.sql.execution.metric.SQLMetric) references[0] /* numOutputRows */).add(1);
/* 030 */       boolean localtablescan_isNull_0 = localtablescan_row_0.isNullAt(0);
/* 031 */       int localtablescan_value_0 = localtablescan_isNull_0 ?
/* 032 */       -1 : (localtablescan_row_0.getInt(0));
/* 033 */
/* 034 */       // NAAJ
/* 035 */       // isOriginalInputEmpty(false) allNullColumnKeyExistsInOriginalInput(true)
/* 036 */       // reject all
/* 037 */       if (shouldStop()) return;
/* 038 */     }
/* 039 */   }
```
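   For reference, here is a minimal sketch of the single-column NOT IN semantics that this "reject all" branch implements: under three-valued logic, a NULL key on the build side means the NOT IN predicate can never evaluate to TRUE, so the null-aware anti join returns no rows, while an empty build side accepts every streamed row (the isOriginalInputEmpty fast path). The view names and local SparkSession setup below are made up for illustration, and whether these queries actually hit the BroadcastHashJoinExec NAAJ code path depends on the planner and broadcast thresholds, so treat this purely as a semantics illustration.
   ```
   // Hypothetical standalone example; view names and SparkSession config are assumptions.
   import org.apache.spark.sql.SparkSession

   object NotInSemanticsSketch {
     def main(args: Array[String]): Unit = {
       val spark = SparkSession.builder()
         .master("local[*]")
         .appName("naaj-semantics-sketch")
         .getOrCreate()
       import spark.implicits._

       Seq(1, 2, 3).toDF("id").createOrReplaceTempView("stream_side")
       Seq[Option[Int]](Some(1), None).toDF("id").createOrReplaceTempView("build_side")

       // Build side contains a NULL key: NOT IN is never TRUE, so this returns 0 rows,
       // matching the "reject all" branch in the generated code above.
       spark.sql(
         "SELECT * FROM stream_side WHERE id NOT IN (SELECT id FROM build_side)").show()

       // Empty build side: NOT IN is trivially TRUE, so every streamed row is accepted,
       // matching the isOriginalInputEmpty fast path in the diff.
       spark.sql(
         "SELECT * FROM stream_side WHERE id NOT IN (SELECT id FROM build_side WHERE 1 = 0)").show()

       spark.stop()
     }
   }
   ```
   The generated processNext() shown above simply hard-codes the second of these two outcomes, which is why it only increments the metric driver-side decisions would otherwise compute per row and never calls consume for the reject-all case.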
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]