fred-db commented on code in PR #38497: URL: https://github.com/apache/spark/pull/38497#discussion_r1026142927
########## sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/subquery.scala: ########## @@ -201,15 +204,17 @@ object RewritePredicateSubquery extends Rule[LogicalPlan] with PredicateHelper { // +- Relation[id#80] parquet val nullAwareJoinConds = inConditions.map(c => Or(c, IsNull(c))) val finalJoinCond = (nullAwareJoinConds ++ conditions).reduceLeft(And) - newPlan = Join(newPlan, newSub, ExistenceJoin(exists), Some(finalJoinCond), JoinHint.NONE) + val joinHint = JoinHint(None, subHint) + newPlan = Join(newPlan, newSub, ExistenceJoin(exists), Some(finalJoinCond), joinHint) Not(exists) - case InSubquery(values, ListQuery(sub, _, _, _, conditions)) => + case InSubquery(values, ListQuery(sub, _, _, _, conditions, subHint)) => val exists = AttributeReference("exists", BooleanType, nullable = false)() // Deduplicate conflicting attributes if any. val newSub = dedupSubqueryOnSelfJoin(newPlan, sub, Some(values)) val inConditions = values.zip(newSub.output).map(EqualTo.tupled) val newConditions = (inConditions ++ conditions).reduceLeftOption(And) - newPlan = Join(newPlan, newSub, ExistenceJoin(exists), newConditions, JoinHint.NONE) + newPlan = + Join(newPlan, newSub, ExistenceJoin(exists), newConditions, JoinHint(None, subHint)) Review Comment: I think the issue I had here is that the Scala style guide does not specify how to deal with this case. My approach was to first try to put the method invocation on a separate line; if that does not fit, pull the join hint out into a local `val`; and if that is still not enough, pull out the existence join as well. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org