peter-toth commented on a change in pull request #32396: URL: https://github.com/apache/spark/pull/32396#discussion_r624452389
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/PullOutGroupingExpressions.scala
##########
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.optimizer
+
+import scala.collection.mutable
+
+import org.apache.spark.sql.catalyst.expressions.{Alias, Expression, NamedExpression}
+import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
+import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, LogicalPlan, Project}
+import org.apache.spark.sql.catalyst.rules.Rule
+
+/**
+ * This rule ensures that [[Aggregate]] nodes don't contain complex grouping expressions in the
+ * optimization phase.
+ *
+ * Complex grouping expressions are pulled out to a [[Project]] node under [[Aggregate]] and are
+ * referenced in both grouping expressions and aggregate expressions without aggregate functions.
+ * These references ensure that optimization rules don't change the aggregate expressions to
+ * invalid ones that no longer refer to any grouping expressions, and they also simplify the
+ * expression transformations on the node (each expression needs to be transformed only once).
+ *
+ * For example, in the following query Spark shouldn't optimize the aggregate expression
+ * `Not(IsNull(c))` to `IsNotNull(c)`, as the grouping expression is `IsNull(c)`:
+ *   SELECT not(c IS NULL)
+ *   FROM t
+ *   GROUP BY c IS NULL
+ * Instead, the aggregate expression references a `_groupingexpression` attribute:
+ *   Aggregate [_groupingexpression#233], [NOT _groupingexpression#233 AS (NOT (c IS NULL))#230]
+ *   +- Project [isnull(c#219) AS _groupingexpression#233]
+ *      +- LocalRelation [c#219]
+ */
+object PullOutGroupingExpressions extends Rule[LogicalPlan] {
+  override def apply(plan: LogicalPlan): LogicalPlan = {
+    plan transform {
+      case a: Aggregate if a.resolved =>
+        val complexGroupingExpressionMap = mutable.LinkedHashMap.empty[Expression, NamedExpression]
+        val newGroupingExpressions = a.groupingExpressions.map {
+          case e if !e.foldable && e.children.nonEmpty =>
+            complexGroupingExpressionMap
+              .getOrElseUpdate(e.canonicalized, Alias(e, "_groupingexpression")())
+              .toAttribute
+          case o => o
+        }
+        if (complexGroupingExpressionMap.nonEmpty) {
+          def replaceComplexGroupingExpressions(e: Expression): Expression = {
+            e match {
+              case _ if AggregateExpression.isAggregate(e) => e
+              case _ if e.foldable => e
+              case _ if complexGroupingExpressionMap.contains(e.canonicalized) =>
+                complexGroupingExpressionMap.get(e.canonicalized).map(_.toAttribute).getOrElse(e)
+              case _ => e.mapChildren(replaceComplexGroupingExpressions)
+            }
+          }
+
+          val newAggregateExpressions = a.aggregateExpressions
+            .map(replaceComplexGroupingExpressions(_).asInstanceOf[NamedExpression])

Review comment:
   I think I'm leaving this PR as it is for now. But I tested that https://github.com/peter-toth/spark/commit/ed374fe046f5bb0a65b2bf72129b37df9cdce2ea could work; I just need to cast `TreePatternBits` to `Expression`. That said, I wonder whether it would make sense to split plan and expression pruning in the future, like this: https://github.com/peter-toth/spark/commit/d817fc724a78b8aa2de59999f237346d412e46dd, so that this pruning (and probably other similar cases where we want to stop traversal) becomes simpler: https://github.com/peter-toth/spark/commit/d817fc724a78b8aa2de59999f237346d412e46dd#diff-57201016f79912c165715811d7f7f37e2acbef2ae7b241c3c8a0b928d0052eb5R61
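   For anyone who wants to see the rule's effect end to end, here is a minimal spark-shell style sketch (assuming a Spark build that already contains this change); the temp view `t` and column `c` are just made up to mirror the scaladoc example:

```scala
import org.apache.spark.sql.SparkSession

// Assumes a local build that includes PullOutGroupingExpressions.
val spark = SparkSession.builder().master("local[*]").getOrCreate()

// A nullable int column `c`, mirroring the example in the scaladoc.
spark.sql("CREATE OR REPLACE TEMPORARY VIEW t AS SELECT * FROM VALUES (1), (NULL), (2) AS t(c)")

// The optimized plan should show isnull(c) pulled out into a Project as
// `_groupingexpression`, referenced from both the grouping and the aggregate
// expressions of the Aggregate node.
println(
  spark.sql("SELECT not(c IS NULL) FROM t GROUP BY c IS NULL")
    .queryExecution.optimizedPlan)
```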

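   Purely as an illustration of the "stop traversal" idea mentioned above (this is not the code in the linked commits), a helper along these lines could replace the hand-written recursion in `replaceComplexGroupingExpressions`; the name `transformDownWithStop` and its shape are invented for this sketch:

```scala
import org.apache.spark.sql.catalyst.expressions.Expression

// Hypothetical helper, not part of Catalyst: apply `rule` top-down but skip
// whole subtrees for which `stop` holds (e.g. aggregate functions).
def transformDownWithStop(
    e: Expression,
    stop: Expression => Boolean)(
    rule: PartialFunction[Expression, Expression]): Expression = {
  if (stop(e)) {
    e
  } else {
    // If the rule matches this node, replace it and stop descending; otherwise
    // recurse into the children, mirroring what replaceComplexGroupingExpressions
    // does today with its explicit recursion.
    rule.applyOrElse(
      e,
      (other: Expression) => other.mapChildren(child => transformDownWithStop(child, stop)(rule)))
  }
}
```

   With such a helper, the rule body could pass `stop = ex => AggregateExpression.isAggregate(ex) || ex.foldable` and keep only the `complexGroupingExpressionMap` lookup in the partial function.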