leanken commented on a change in pull request #29104:
URL: https://github.com/apache/spark/pull/29104#discussion_r454996484



##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoinExec.scala
##########
@@ -205,6 +226,117 @@ case class BroadcastNestedLoopJoinExec(
     }
   }
 
+  case class NotInSubquerySingleColumnOptimizeParams(
+      buildSideHashSet: mutable.HashSet[AnyRef],
+      isNullExists: Boolean,
+      isBuildRowsEmpty: Boolean)
+
+  private def notInSubquerySingleColumnOptimizeEnabled: Boolean = {
+    if (SQLConf.get.notInSubquerySingleColumnOptimizeEnabled && right.output.length == 1) {
+      // buildSide must be single column
+      // and condition must be either of following pattern
+      // or(a=b,isnull(a=b))
+      // or(isnull(a=b),a=b)
+      condition.get match {
+        case _@Or(_@EqualTo(leftAttr: AttributeReference, rightAttr: AttributeReference),
+            _@IsNull(_@EqualTo(tmpLeft: AttributeReference, tmpRight: AttributeReference)))
+            if leftAttr.semanticEquals(tmpLeft) && rightAttr.semanticEquals(tmpRight) =>
+          notInSubquerySingleColumnOptimizeSetStreamedKey(leftAttr, rightAttr)
+          if (notInSubquerySingleColumnOptimizeStreamedKeyIndex != -1) {
+            true
+          } else {
+            logWarning(s"failed to find notInSubquerySingleColumnOptimizeStreamedKeyIndex," +
+              s" fallback to leftExistenceJoin.")
+            false
+          }
+        case _@Or(_@IsNull(_@EqualTo(tmpLeft: AttributeReference, tmpRight: AttributeReference)),
+            _@EqualTo(leftAttr: AttributeReference, rightAttr: AttributeReference))
+            if leftAttr.semanticEquals(tmpLeft) && rightAttr.semanticEquals(tmpRight) =>
+          notInSubquerySingleColumnOptimizeSetStreamedKey(leftAttr, rightAttr)
+          if (notInSubquerySingleColumnOptimizeStreamedKeyIndex != -1) {
+            true
+          } else {
+            logWarning(s"failed to find notInSubquerySingleColumnOptimizeStreamedKeyIndex," +
+              s" fallback to leftExistenceJoin.")
+            false
Review comment:
       I checked the source code in subquery.scala and found that Or(EqualTo(a, b), IsNull(EqualTo(a, b))) is the only pattern the optimizer produces, so there is no need to handle both Or orderings. I removed the duplicate code.
   
   ```
   // See org/apache/spark/sql/catalyst/optimizer/subquery.scala
   val inConditions = values.zip(sub.output).map(EqualTo.tupled)
   val nullAwareJoinConds = inConditions.map(c => Or(c, IsNull(c)))
   ```
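   
   FWIW, here is a minimal sketch of what the match could look like once the mirrored Or case is dropped. The helper name extractSingleColumnNullAwareJoinKeys is hypothetical; it only illustrates handling the single Or(EqualTo, IsNull(EqualTo)) shape the optimizer emits, not the actual change in this PR:
   
   ```
   import org.apache.spark.sql.catalyst.expressions.{AttributeReference, EqualTo, Expression, IsNull, Or}
   
   // Hypothetical helper: since the optimizer only ever emits
   // Or(EqualTo(a, b), IsNull(EqualTo(a, b))), a single case is enough
   // to extract the streamed/build side join keys.
   def extractSingleColumnNullAwareJoinKeys(
       cond: Expression): Option[(AttributeReference, AttributeReference)] = cond match {
     case Or(EqualTo(leftAttr: AttributeReference, rightAttr: AttributeReference),
         IsNull(EqualTo(tmpLeft: AttributeReference, tmpRight: AttributeReference)))
         if leftAttr.semanticEquals(tmpLeft) && rightAttr.semanticEquals(tmpRight) =>
       Some((leftAttr, rightAttr))
     case _ => None
   }
   ```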



