leanken commented on a change in pull request #29104:
URL: https://github.com/apache/spark/pull/29104#discussion_r461220193



##########
File path: 
sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
##########
@@ -903,15 +910,55 @@ private[joins] object LongHashedRelation {
       if (!rowKey.isNullAt(0)) {
         val key = rowKey.getLong(0)
         map.append(key, unsafeRow)
+      } else if (isNullAware) {
+        return new EmptyHashedRelationWithAllNullKeys
       }
     }
     map.optimize()
     new LongHashedRelation(numFields, map)
   }
 }
 
+/**
+ * Common trait with dummy implementation for NAAJ special HashedRelation
+ * EmptyHashedRelation
+ * EmptyHashedRelationWithAllNullKeys
+ */
+trait NullAwareHashedRelation extends HashedRelation with Externalizable {
+  override def get(key: InternalRow): Iterator[InternalRow] = null

Review comment:
       Done — updated.

##########
File path: 
sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
##########
@@ -903,15 +910,55 @@ private[joins] object LongHashedRelation {
       if (!rowKey.isNullAt(0)) {
         val key = rowKey.getLong(0)
         map.append(key, unsafeRow)
+      } else if (isNullAware) {
+        return new EmptyHashedRelationWithAllNullKeys

Review comment:
       Done.

##########
File path: sql/core/src/test/resources/sql-tests/inputs/group-by-filter.sql
##########
@@ -1,5 +1,8 @@
 -- Test filter clause for aggregate expression.
 
+--CONFIG_DIM1 spark.sql.optimizeNullAwareAntiJoin=true
+--CONFIG_DIM1 spark.sql.optimizeNullAwareAntiJoin=false
+

Review comment:
       Thanks to @cloud-fan — I now know of this better way to get e2e case 
coverage when adding a new feature.




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to