cloud-fan commented on a change in pull request #32476:
URL: https://github.com/apache/spark/pull/32476#discussion_r630765647
##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/joins/SortMergeJoinExec.scala
##########
@@ -603,21 +681,55 @@ case class SortMergeJoinExec(
     val thisPlan = ctx.addReferenceObj("plan", this)
     val eagerCleanup = s"$thisPlan.cleanupResources();"

-    s"""
-       |while (findNextJoinRows($streamedInput, $bufferedInput)) {
-       |  ${streamedVarDecl.mkString("\n")}
-       |  ${beforeLoop.trim}
-       |  scala.collection.Iterator<UnsafeRow> $iterator = $matches.generateIterator();
-       |  while ($iterator.hasNext()) {
-       |    InternalRow $bufferedRow = (InternalRow) $iterator.next();
-       |    ${condCheck.trim}
-       |    $numOutput.add(1);
-       |    ${consume(ctx, resultVars)}
-       |  }
-       |  if (shouldStop()) return;
-       |}
-       |$eagerCleanup
+    lazy val innerJoin =
+      s"""
+         |while (findNextJoinRows($streamedInput, $bufferedInput)) {
+         |  ${streamedVarDecl.mkString("\n")}
+         |  ${beforeLoop.trim}
+         |  scala.collection.Iterator<UnsafeRow> $iterator = $matches.generateIterator();
+         |  while ($iterator.hasNext()) {
+         |    InternalRow $bufferedRow = (InternalRow) $iterator.next();
+         |    ${condCheck.trim}
+         |    $numOutput.add(1);
+         |    ${consume(ctx, resultVars)}
+         |  }
+         |  if (shouldStop()) return;
+         |}
+         |$eagerCleanup
       """.stripMargin
+
+    lazy val outerJoin = {
+      val foundMatch = ctx.freshName("foundMatch")
+      s"""
+         |while ($streamedInput.hasNext()) {
+         |  findNextJoinRows($streamedInput, $bufferedInput);
+         |  ${streamedVarDecl.mkString("\n")}
+         |  ${beforeLoop.trim}
+         |  scala.collection.Iterator<UnsafeRow> $iterator = $matches.generateIterator();
+         |  boolean $foundMatch = false;

Review comment:
   This name is a bit confusing, as we will set it to true even if there is no match. How about `boolean firstIteration = true;`?
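   To make the naming point concrete, here is a minimal, self-contained sketch of the flag's behavior in plain Java. It is not the actual generated code; `joinOneStreamedRow` and `emitRow` are made-up stand-ins for the generated loop and `consume(...)`. The inner loop must run at least once per streamed row so that an unmatched row still produces a null-padded output row, which is why the flag gets flipped even when there is no match and a name like `firstIteration` describes it more accurately than `foundMatch`.

```java
import java.util.Iterator;
import java.util.List;

// Self-contained sketch, NOT the real codegen output: it only demonstrates the
// flag discussed above. joinOneStreamedRow/emitRow are hypothetical helpers.
public final class OuterLoopSketch {

  // Stand-in for the generated consume(...) call that emits one output row.
  static void emitRow(String streamed, String buffered) {
    System.out.println(streamed + " | " + buffered);  // buffered prints as "null" when unmatched
  }

  // Per streamed row: iterate the buffered matches, but run at least once so an
  // unmatched streamed row still produces a null-padded output row.
  static void joinOneStreamedRow(String streamedRow, List<String> matches) {
    Iterator<String> iterator = matches.iterator();
    boolean firstIteration = true;  // the name suggested in the review comment
    while (iterator.hasNext() || firstIteration) {
      String bufferedRow = iterator.hasNext() ? iterator.next() : null;
      // Flipped unconditionally, even when bufferedRow is null (no match at all),
      // which is why calling this flag "foundMatch" would be misleading.
      firstIteration = false;
      emitRow(streamedRow, bufferedRow);
    }
  }

  public static void main(String[] args) {
    joinOneStreamedRow("a", List.of("x", "y"));  // matched: prints two joined rows
    joinOneStreamedRow("b", List.of());          // unmatched: prints one null-padded row
  }
}
```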