maropu commented on a change in pull request #29304:
URL: https://github.com/apache/spark/pull/29304#discussion_r463964874
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
##########
@@ -393,33 +395,42 @@ object PhysicalWindow {
object ExtractSingleColumnNullAwareAntiJoin extends JoinSelectionHelper with PredicateHelper {
Review comment:
`SingleColumn` -> `MultiColumn`
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
##########
@@ -2678,14 +2678,26 @@ object SQLConf {
.checkValue(_ >= 0, "The value must be non-negative.")
.createWithDefault(8)
+ val OPTIMIZE_NULL_AWARE_ANTI_JOIN_MAX_NUM_KEYS =
+ buildConf("spark.sql.optimizeNullAwareAntiJoin.maxNumKeys")
+ .internal()
+ .doc("The maximum number of keys that will be supported to use NAAJ
optimize. " +
+ "While with NAAJ optimize, buildSide data would be expanded to
(2^numKeys - 1) times, " +
+ "it might cause Driver OOM if NAAJ numKeys increased, since it is
exponential growth. " +
+ "It's ok to increase this configuration if buildSide is small enough
and safe enough " +
+ "to do such exponential expansion to gain performance improvement from
O(M*N) to O(M).")
Review comment:
Do we need the last two statements above? IMO, the first three statements look enough.
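To make the exponential growth concrete, a quick standalone check of the factor the doc quotes (hypothetical helper, not PR code):
```
// With numKeys join keys, the build side is expanded (2^numKeys - 1) times.
def expansionFactor(numKeys: Int): Long = (1L << numKeys) - 1
assert(expansionFactor(1) == 1)        // single column: just one null-padded record
assert(expansionFactor(3) == 7)
assert(expansionFactor(20) == 1048575) // why a small cap matters
```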
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala
##########
@@ -1188,4 +1183,42 @@ class JoinSuite extends QueryTest with SharedSparkSession with AdaptiveSparkPlan
classOf[BroadcastNestedLoopJoinExec]))
}
}
+
+ test("SPARK-32494: Null Aware Anti Join Optimize Support Multi-Column") {
+ withSQLConf(SQLConf.OPTIMIZE_NULL_AWARE_ANTI_JOIN.key -> "true",
+ SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> Long.MaxValue.toString) {
+ // positive not in subquery case
+ var joinExec = assertJoin((
+ "select * from testData where (key, key + 1) not in (select * from
testData2)",
Review comment:
Could you please use uppercase for SQL keywords where possible? e.g., `SELECT * FROM testData...`
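For the first query above, that is just a casing change:
```
"SELECT * FROM testData WHERE (key, key + 1) NOT IN (SELECT * FROM testData2)"
```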
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
##########
@@ -393,33 +395,42 @@ object PhysicalWindow {
object ExtractSingleColumnNullAwareAntiJoin extends JoinSelectionHelper with PredicateHelper {
- // TODO support multi column NULL-aware anti join in future.
- // See. http://www.vldb.org/pvldb/vol2/vldb09-423.pdf Section 6
- // multi-column null aware anti join is much more complicated than single column ones.
-
// streamedSideKeys, buildSideKeys
private type ReturnType = (Seq[Expression], Seq[Expression])
- /**
- * See. [SPARK-32290]
- * LeftAnti(condition: Or(EqualTo(a=b), IsNull(EqualTo(a=b)))
- * will almost certainly be planned as a Broadcast Nested Loop join,
- * which is very time consuming because it's an O(M*N) calculation.
- * But if it's a single column case O(M*N) calculation could be optimized into O(M)
- * using hash lookup instead of loop lookup.
- */
def unapply(join: Join): Option[ReturnType] = join match {
-    case Join(left, right, LeftAnti,
-      Some(Or(e @ EqualTo(leftAttr: AttributeReference, rightAttr: AttributeReference),
-        IsNull(e2 @ EqualTo(_, _)))), _)
-        if SQLConf.get.optimizeNullAwareAntiJoin &&
-          e.semanticEquals(e2) =>
-      if (canEvaluate(leftAttr, left) && canEvaluate(rightAttr, right)) {
-        Some(Seq(leftAttr), Seq(rightAttr))
-      } else if (canEvaluate(leftAttr, right) && canEvaluate(rightAttr, left)) {
-        Some(Seq(rightAttr), Seq(leftAttr))
-      } else {
+    case Join(left, right, LeftAnti, condition, _) if SQLConf.get.optimizeNullAwareAntiJoin =>
+      val predicates = condition.map(splitConjunctivePredicates).getOrElse(Nil)
+      if (predicates.isEmpty ||
+          predicates.length > SQLConf.get.optimizeNullAwareAntiJoinMaxNumKeys) {
         None
+      } else {
+        val leftKeys = ArrayBuffer[Expression]()
+        val rightKeys = ArrayBuffer[Expression]()
+
+        // all predicate must match pattern condition: Or(EqualTo(a=b), IsNull(EqualTo(a=b)))
+        val allMatch = predicates.forall {
Review comment:
nit: format; how about this?
```
val joinKeys = ArrayBuffer[(Expression, Expression)]()
// All predicate must match pattern condition: Or(EqualTo(a=b), IsNull(EqualTo(a=b)))
val allMatch = predicates.forall {
  case Or(e @ EqualTo(leftExpr: Expression, rightExpr: Expression),
      IsNull(e2 @ EqualTo(_, _))) if e.semanticEquals(e2) =>
    if (canEvaluate(leftExpr, left) && canEvaluate(rightExpr, right)) {
      joinKeys += ((leftExpr, rightExpr))
      true
    } else if (canEvaluate(leftExpr, right) && canEvaluate(rightExpr, left)) {
      joinKeys += ((rightExpr, leftExpr))
      true
    } else {
      false
    }
  case _ =>
    false
}
if (allMatch) {
  Some(joinKeys.unzip)
} else {
  None
}
```
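A side note on the design choice: collecting the pairs into one buffer means `unzip` hands back exactly the `(streamedSideKeys, buildSideKeys)` shape that `ReturnType` expects. A standalone toy sketch (not PR code):
```
import scala.collection.mutable.ArrayBuffer

// Toy stand-ins for the (streamedSideKey, buildSideKey) pairs collected above.
val joinKeys = ArrayBuffer(("l1", "r1"), ("l2", "r2"))
val (streamedSideKeys, buildSideKeys) = joinKeys.unzip
assert(streamedSideKeys == ArrayBuffer("l1", "l2"))
assert(buildSideKeys == ArrayBuffer("r1", "r2"))
```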
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
##########
@@ -2678,14 +2678,26 @@ object SQLConf {
.checkValue(_ >= 0, "The value must be non-negative.")
.createWithDefault(8)
+ val OPTIMIZE_NULL_AWARE_ANTI_JOIN_MAX_NUM_KEYS =
+ buildConf("spark.sql.optimizeNullAwareAntiJoin.maxNumKeys")
+ .internal()
+ .doc("The maximum number of keys that will be supported to use NAAJ
optimize. " +
+ "While with NAAJ optimize, buildSide data would be expanded to
(2^numKeys - 1) times, " +
+ "it might cause Driver OOM if NAAJ numKeys increased, since it is
exponential growth. " +
+ "It's ok to increase this configuration if buildSide is small enough
and safe enough " +
+ "to do such exponential expansion to gain performance improvement from
O(M*N) to O(M).")
+ .intConf
Review comment:
Please add `checkValue`. I think only a positive value seems reasonable.
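For instance, right after `.intConf` (a sketch; the exact message wording is up to you):
```
.intConf
.checkValue(_ > 0, "The value of spark.sql.optimizeNullAwareAntiJoin.maxNumKeys must be positive.")
```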
##########
File path:
sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastHashJoinExec.scala
##########
@@ -245,7 +244,7 @@ case class BroadcastHashJoinExec(
|boolean $found = false;
|// generate join key for stream side
|${keyEv.code}
- |if ($anyNull) {
+ |if (${ if (isLongHashedRelation) s"$anyNull" else s"${keyEv.value}.allNull()"}) {
Review comment:
nit: `if (${if (isLongHashedRelation) anyNull else s"${keyEv.value}.allNull()"}) {`
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/planning/patterns.scala
##########
@@ -393,33 +395,42 @@ object PhysicalWindow {
object ExtractSingleColumnNullAwareAntiJoin extends JoinSelectionHelper with PredicateHelper {
- // TODO support multi column NULL-aware anti join in future.
- // See. http://www.vldb.org/pvldb/vol2/vldb09-423.pdf Section 6
- // multi-column null aware anti join is much more complicated than single column ones.
-
// streamedSideKeys, buildSideKeys
private type ReturnType = (Seq[Expression], Seq[Expression])
- /**
- * See. [SPARK-32290]
- * LeftAnti(condition: Or(EqualTo(a=b), IsNull(EqualTo(a=b)))
- * will almost certainly be planned as a Broadcast Nested Loop join,
- * which is very time consuming because it's an O(M*N) calculation.
- * But if it's a single column case O(M*N) calculation could be optimized into O(M)
- * using hash lookup instead of loop lookup.
- */
def unapply(join: Join): Option[ReturnType] = join match {
-    case Join(left, right, LeftAnti,
-      Some(Or(e @ EqualTo(leftAttr: AttributeReference, rightAttr: AttributeReference),
-        IsNull(e2 @ EqualTo(_, _)))), _)
-        if SQLConf.get.optimizeNullAwareAntiJoin &&
-          e.semanticEquals(e2) =>
-      if (canEvaluate(leftAttr, left) && canEvaluate(rightAttr, right)) {
-        Some(Seq(leftAttr), Seq(rightAttr))
-      } else if (canEvaluate(leftAttr, right) && canEvaluate(rightAttr, left)) {
-        Some(Seq(rightAttr), Seq(leftAttr))
-      } else {
+    case Join(left, right, LeftAnti, condition, _) if SQLConf.get.optimizeNullAwareAntiJoin =>
+      val predicates = condition.map(splitConjunctivePredicates).getOrElse(Nil)
+      if (predicates.isEmpty ||
+          predicates.length > SQLConf.get.optimizeNullAwareAntiJoinMaxNumKeys) {
         None
+      } else {
+        val leftKeys = ArrayBuffer[Expression]()
+        val rightKeys = ArrayBuffer[Expression]()
+
+        // all predicate must match pattern condition: Or(EqualTo(a=b), IsNull(EqualTo(a=b)))
Review comment:
nit: `all` -> `All`
##########
File path:
sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
##########
@@ -327,11 +327,29 @@ private[joins] object UnsafeHashedRelation {
// Create a mapping of buildKeys -> rows
val keyGenerator = UnsafeProjection.create(key)
var numFields = 0
+ val nullPaddingCombinations: Seq[UnsafeProjection] = if (isNullAware) {
+ // C(numKeys, 0), C(numKeys, 1) ... C(numKeys, numKeys - 1)
Review comment:
I just reformatted it like this:
```
// C(numKeys, 0), C(numKeys, 1) ... C(numKeys, numKeys - 1)
// In total 2^numKeys - 1 records will be appended.
key.indices.flatMap { n =>
  key.indices.combinations(n).map { combination =>
    // `combination` is a Seq[Int] indicating which keys should be replaced with null padding
    val exprs = key.indices.map { index =>
      if (combination.contains(index)) {
        Literal.create(null, key(index).dataType)
      } else {
        key(index)
      }
    }
    UnsafeProjection.create(exprs)
  }
}
```
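As a sanity check on the enumeration (standalone, no Spark dependencies): choosing n of numKeys positions to null out, for n = 0 .. numKeys - 1, gives C(numKeys, 0) + ... + C(numKeys, numKeys - 1) = 2^numKeys - 1 combinations, matching the comment above:
```
val numKeys = 3
val combos = (0 until numKeys).flatMap(n => (0 until numKeys).combinations(n))
// For numKeys = 3: C(3,0) + C(3,1) + C(3,2) = 1 + 3 + 3 = 7 = 2^3 - 1
assert(combos.size == (1 << numKeys) - 1)
```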
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]