Github user mgaido91 commented on a diff in the pull request:
https://github.com/apache/spark/pull/22326#discussion_r216151151
--- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala ---
@@ -1149,6 +1149,51 @@ object PushPredicateThroughJoin extends Rule[LogicalPlan] with PredicateHelper {
       (leftEvaluateCondition, rightEvaluateCondition, commonCondition ++ nonDeterministic)
     }
+  private def tryToGetCrossType(commonJoinCondition: Seq[Expression], j: LogicalPlan) = {
+    if (SQLConf.get.crossJoinEnabled) {
+      // If a condition expression is unevaluable, it is removed from the new
+      // join conditions. If all conditions are unevaluable, we should change
+      // the join type to CrossJoin.
+      logWarning(s"The whole commonJoinCondition:$commonJoinCondition of the join " +
+        "plan is unevaluable, it will be ignored and the join plan will be " +
+        s"turned to cross join. This plan shows below:\n $j")
+      Cross
+    } else {
+      // If crossJoinEnabled is false, an AnalysisException will be thrown by
+      // CheckCartesianProducts; we throw here first to give a more readable message.
+      throw new AnalysisException("Detected the whole commonJoinCondition:" +
+        s"$commonJoinCondition of the join plan is unevaluable, we need to cast the " +
+        "join to cross join by setting the configuration variable " +
+        s"${SQLConf.CROSS_JOINS_ENABLED.key}=true")
+    }
+  }
+
+  /**
+   * Generate new join by pushing down the side only join filter, split commonJoinCondition
--- End diff ---
nit: `filters`
---
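A minimal, self-contained sketch (assuming Spark 2.x defaults, where `spark.sql.crossJoin.enabled` is false) of the configuration flag that the quoted error message exposes via `SQLConf.CROSS_JOINS_ENABLED`. The condition-less join below is only a hypothetical stand-in for the case the patch handles (a join whose condition the optimizer cannot evaluate); the object and column names are illustrative, not taken from the PR.

```scala
import org.apache.spark.sql.SparkSession

// Sketch only: demonstrates spark.sql.crossJoin.enabled, the flag named in
// the AnalysisException message of the quoted diff.
object CrossJoinConfigSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("cross-join-config-sketch")
      // With the flag left at its Spark 2.x default (false), the
      // condition-less join below is rejected by CheckCartesianProducts
      // with an AnalysisException.
      .config("spark.sql.crossJoin.enabled", "true")
      .getOrCreate()
    import spark.implicits._

    val left = Seq((1, "a"), (2, "b")).toDF("id", "leftValue")
    val right = Seq((1, "x"), (3, "y")).toDF("id", "rightValue")

    // No join condition: the optimizer treats this as a cartesian product,
    // which is allowed here only because the flag above is enabled.
    left.join(right).show()

    spark.stop()
  }
}
```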