cloud-fan commented on a change in pull request #32470:
URL: https://github.com/apache/spark/pull/32470#discussion_r649661183



##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
##########
@@ -2457,164 +2450,127 @@ class Analyzer(override val catalogManager: 
CatalogManager)
       _.containsPattern(AGGREGATE), ruleId) {
       // Resolve aggregate with having clause to Filter(..., Aggregate()). 
Note, to avoid wrongly
       // resolve the having condition expression, here we skip resolving it in 
ResolveReferences
-      // and transform it to Filter after aggregate is resolved. See more 
details in SPARK-31519.
+      // and transform it to Filter after aggregate is resolved. Basically 
columns in HAVING should
+      // be resolved with `agg.child.output` first. See more details in 
SPARK-31519.
       case UnresolvedHaving(cond, agg: Aggregate) if agg.resolved =>
-        resolveHaving(Filter(cond, agg), agg)
-
-      case f @ Filter(_, agg: Aggregate) if agg.resolved =>
-        resolveHaving(f, agg)
-
-      case sort @ Sort(sortOrder, global, aggregate: Aggregate) if 
aggregate.resolved =>
-
-        // Try resolving the ordering as though it is in the aggregate clause.
-        try {
-          // If a sort order is unresolved, containing references not in 
aggregate, or containing
-          // `AggregateExpression`, we need to push down it to the underlying 
aggregate operator.
-          val unresolvedSortOrders = sortOrder.filter { s =>
-            !s.resolved || !s.references.subsetOf(aggregate.outputSet) || 
containsAggregate(s)
+        resolveOperatorWithAggregate(Seq(cond), agg, (newExprs, newChild) => {
+          Filter(newExprs.head, newChild)
+        })
+
+      case Filter(cond, agg: Aggregate) if agg.resolved =>
+        // We should resolve the references normally based on child.output 
first.
+        val maybeResolved = resolveExpressionByPlanOutput(cond, agg)
+        resolveOperatorWithAggregate(Seq(maybeResolved), agg, (newExprs, 
newChild) => {
+          Filter(newExprs.head, newChild)
+        })
+
+      case Sort(sortOrder, global, agg: Aggregate) if agg.resolved =>
+        // We should resolve the references normally based on child.output 
first.
+        val maybeResolved = 
sortOrder.map(_.child).map(resolveExpressionByPlanOutput(_, agg))
+        resolveOperatorWithAggregate(maybeResolved, agg, (newExprs, newChild) 
=> {
+          val newSortOrder = sortOrder.zip(newExprs).map {
+            case (sortOrder, expr) => sortOrder.copy(child = expr)
           }
-          val aliasedOrdering = unresolvedSortOrders.map(o => Alias(o.child, 
"aggOrder")())
-
-          val aggregateWithExtraOrdering = aggregate.copy(
-            aggregateExpressions = aggregate.aggregateExpressions ++ 
aliasedOrdering)
-
-          val resolvedAggregate: Aggregate =
-            
executeSameContext(aggregateWithExtraOrdering).asInstanceOf[Aggregate]
-
-          val (reResolvedAggExprs, resolvedAliasedOrdering) =
-            
resolvedAggregate.aggregateExpressions.splitAt(aggregate.aggregateExpressions.length)
-
-          // If we pass the analysis check, then the ordering expressions 
should only reference to
-          // aggregate expressions or grouping expressions, and it's safe to 
push them down to
-          // Aggregate.
-          checkAnalysis(resolvedAggregate)
-
-          val originalAggExprs = 
aggregate.aggregateExpressions.map(trimNonTopLevelAliases)
-
-          // If the ordering expression is same with original aggregate 
expression, we don't need
-          // to push down this ordering expression and can reference the 
original aggregate
-          // expression instead.
-          val needsPushDown = ArrayBuffer.empty[NamedExpression]
-          val orderToAlias = unresolvedSortOrders.zip(aliasedOrdering)
-          val evaluatedOrderings =
-            
resolvedAliasedOrdering.asInstanceOf[Seq[Alias]].zip(orderToAlias).map {
-              case (evaluated, (order, aliasOrder)) =>
-                val index = reResolvedAggExprs.indexWhere {
-                  case Alias(child, _) => child semanticEquals evaluated.child
-                  case other => other semanticEquals evaluated.child
-                }
-
-                if (index == -1) {
-                  if (hasCharVarchar(evaluated)) {
-                    needsPushDown += aliasOrder
-                    order.copy(child = aliasOrder)
-                  } else {
-                    needsPushDown += evaluated
-                    order.copy(child = evaluated.toAttribute)
-                  }
-                } else {
-                  order.copy(child = originalAggExprs(index).toAttribute)
-                }
+          Sort(newSortOrder, global, newChild)
+        })
+    }
+
+    def resolveExprsWithAggregate(
+        exprs: Seq[Expression],
+        agg: Aggregate): (Seq[NamedExpression], Seq[Expression]) = {
+      val aggregateExpressions = ArrayBuffer.empty[NamedExpression]
+      val transformed = exprs.map { e =>
+        // Try resolving the expression as though it is in the aggregate 
clause.
+        def resolveCol(input: Expression): Expression = {
+          resolveExpressionByPlanOutput(input, agg.child)
+        }
+        def resolveSubQuery(input: Expression): Expression = {
+          if (SubqueryExpression.hasSubquery(input)) {
+            val fake = Project(Alias(input, "fake")() :: Nil, agg.child)
+            
ResolveSubquery(fake).asInstanceOf[Project].projectList.head.asInstanceOf[Alias].child
+          } else {
+            input
           }
-
-          val sortOrdersMap = unresolvedSortOrders
-            .map(new TreeNodeRef(_))
-            .zip(evaluatedOrderings)
-            .toMap
-          val finalSortOrders = sortOrder.map(s => sortOrdersMap.getOrElse(new 
TreeNodeRef(s), s))
-
-          // Since we don't rely on sort.resolved as the stop condition for 
this rule,
-          // we need to check this and prevent applying this rule multiple 
times
-          if (sortOrder == finalSortOrders) {
-            sort
+        }
+        val maybeResolved = resolveSubQuery(resolveCol(e))
+        if (maybeResolved.resolved && 
maybeResolved.references.subsetOf(agg.outputSet) &&
+          !containsAggregate(maybeResolved)) {
+          // The given expression is valid and doesn't need extra resolution.
+          maybeResolved
+        } else if (containsUnresolvedFunc(maybeResolved)) {
+          // The given expression has unresolved functions which may be 
aggregate functions and we
+          // need to wait for other rules to resolve the functions first.
+          maybeResolved
+        } else {
+          // Avoid adding an extra aggregate expression if it's already 
present in
+          // `agg.aggregateExpressions`.
+          val index = if (maybeResolved.resolved) {
+            agg.aggregateExpressions.indexWhere {
+              case Alias(child, _) => child semanticEquals maybeResolved
+              case other => other semanticEquals maybeResolved
+            }
           } else {
-            Project(aggregate.output,
-              Sort(finalSortOrders, global,
-                aggregate.copy(aggregateExpressions = originalAggExprs ++ 
needsPushDown)))
+            -1
+          }
+          if (index >= 0) {

Review comment:
       `val index = ...` this may return -1 if no match is found.

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
##########
@@ -2457,164 +2450,127 @@ class Analyzer(override val catalogManager: 
CatalogManager)
       _.containsPattern(AGGREGATE), ruleId) {
       // Resolve aggregate with having clause to Filter(..., Aggregate()). 
Note, to avoid wrongly
       // resolve the having condition expression, here we skip resolving it in 
ResolveReferences
-      // and transform it to Filter after aggregate is resolved. See more 
details in SPARK-31519.
+      // and transform it to Filter after aggregate is resolved. Basically 
columns in HAVING should
+      // be resolved with `agg.child.output` first. See more details in 
SPARK-31519.
       case UnresolvedHaving(cond, agg: Aggregate) if agg.resolved =>
-        resolveHaving(Filter(cond, agg), agg)
-
-      case f @ Filter(_, agg: Aggregate) if agg.resolved =>
-        resolveHaving(f, agg)
-
-      case sort @ Sort(sortOrder, global, aggregate: Aggregate) if 
aggregate.resolved =>
-
-        // Try resolving the ordering as though it is in the aggregate clause.
-        try {
-          // If a sort order is unresolved, containing references not in 
aggregate, or containing
-          // `AggregateExpression`, we need to push down it to the underlying 
aggregate operator.
-          val unresolvedSortOrders = sortOrder.filter { s =>
-            !s.resolved || !s.references.subsetOf(aggregate.outputSet) || 
containsAggregate(s)
+        resolveOperatorWithAggregate(Seq(cond), agg, (newExprs, newChild) => {
+          Filter(newExprs.head, newChild)
+        })
+
+      case Filter(cond, agg: Aggregate) if agg.resolved =>
+        // We should resolve the references normally based on child.output 
first.
+        val maybeResolved = resolveExpressionByPlanOutput(cond, agg)
+        resolveOperatorWithAggregate(Seq(maybeResolved), agg, (newExprs, 
newChild) => {
+          Filter(newExprs.head, newChild)
+        })
+
+      case Sort(sortOrder, global, agg: Aggregate) if agg.resolved =>
+        // We should resolve the references normally based on child.output 
first.
+        val maybeResolved = 
sortOrder.map(_.child).map(resolveExpressionByPlanOutput(_, agg))
+        resolveOperatorWithAggregate(maybeResolved, agg, (newExprs, newChild) 
=> {
+          val newSortOrder = sortOrder.zip(newExprs).map {
+            case (sortOrder, expr) => sortOrder.copy(child = expr)
           }
-          val aliasedOrdering = unresolvedSortOrders.map(o => Alias(o.child, 
"aggOrder")())
-
-          val aggregateWithExtraOrdering = aggregate.copy(
-            aggregateExpressions = aggregate.aggregateExpressions ++ 
aliasedOrdering)
-
-          val resolvedAggregate: Aggregate =
-            
executeSameContext(aggregateWithExtraOrdering).asInstanceOf[Aggregate]
-
-          val (reResolvedAggExprs, resolvedAliasedOrdering) =
-            
resolvedAggregate.aggregateExpressions.splitAt(aggregate.aggregateExpressions.length)
-
-          // If we pass the analysis check, then the ordering expressions 
should only reference to
-          // aggregate expressions or grouping expressions, and it's safe to 
push them down to
-          // Aggregate.
-          checkAnalysis(resolvedAggregate)
-
-          val originalAggExprs = 
aggregate.aggregateExpressions.map(trimNonTopLevelAliases)
-
-          // If the ordering expression is same with original aggregate 
expression, we don't need
-          // to push down this ordering expression and can reference the 
original aggregate
-          // expression instead.
-          val needsPushDown = ArrayBuffer.empty[NamedExpression]
-          val orderToAlias = unresolvedSortOrders.zip(aliasedOrdering)
-          val evaluatedOrderings =
-            
resolvedAliasedOrdering.asInstanceOf[Seq[Alias]].zip(orderToAlias).map {
-              case (evaluated, (order, aliasOrder)) =>
-                val index = reResolvedAggExprs.indexWhere {
-                  case Alias(child, _) => child semanticEquals evaluated.child
-                  case other => other semanticEquals evaluated.child
-                }
-
-                if (index == -1) {
-                  if (hasCharVarchar(evaluated)) {
-                    needsPushDown += aliasOrder
-                    order.copy(child = aliasOrder)
-                  } else {
-                    needsPushDown += evaluated
-                    order.copy(child = evaluated.toAttribute)
-                  }
-                } else {
-                  order.copy(child = originalAggExprs(index).toAttribute)
-                }
+          Sort(newSortOrder, global, newChild)
+        })
+    }
+
+    def resolveExprsWithAggregate(
+        exprs: Seq[Expression],
+        agg: Aggregate): (Seq[NamedExpression], Seq[Expression]) = {
+      val aggregateExpressions = ArrayBuffer.empty[NamedExpression]
+      val transformed = exprs.map { e =>
+        // Try resolving the expression as though it is in the aggregate 
clause.
+        def resolveCol(input: Expression): Expression = {
+          resolveExpressionByPlanOutput(input, agg.child)
+        }
+        def resolveSubQuery(input: Expression): Expression = {
+          if (SubqueryExpression.hasSubquery(input)) {
+            val fake = Project(Alias(input, "fake")() :: Nil, agg.child)
+            
ResolveSubquery(fake).asInstanceOf[Project].projectList.head.asInstanceOf[Alias].child
+          } else {
+            input
           }
-
-          val sortOrdersMap = unresolvedSortOrders
-            .map(new TreeNodeRef(_))
-            .zip(evaluatedOrderings)
-            .toMap
-          val finalSortOrders = sortOrder.map(s => sortOrdersMap.getOrElse(new 
TreeNodeRef(s), s))
-
-          // Since we don't rely on sort.resolved as the stop condition for 
this rule,
-          // we need to check this and prevent applying this rule multiple 
times
-          if (sortOrder == finalSortOrders) {
-            sort
+        }
+        val maybeResolved = resolveSubQuery(resolveCol(e))
+        if (maybeResolved.resolved && 
maybeResolved.references.subsetOf(agg.outputSet) &&
+          !containsAggregate(maybeResolved)) {
+          // The given expression is valid and doesn't need extra resolution.
+          maybeResolved
+        } else if (containsUnresolvedFunc(maybeResolved)) {
+          // The given expression has unresolved functions which may be 
aggregate functions and we
+          // need to wait for other rules to resolve the functions first.
+          maybeResolved
+        } else {
+          // Avoid adding an extra aggregate expression if it's already 
present in
+          // `agg.aggregateExpressions`.
+          val index = if (maybeResolved.resolved) {
+            agg.aggregateExpressions.indexWhere {
+              case Alias(child, _) => child semanticEquals maybeResolved
+              case other => other semanticEquals maybeResolved
+            }
           } else {
-            Project(aggregate.output,
-              Sort(finalSortOrders, global,
-                aggregate.copy(aggregateExpressions = originalAggExprs ++ 
needsPushDown)))
+            -1
+          }
+          if (index >= 0) {
+            agg.aggregateExpressions(index).toAttribute
+          } else {
+            buildAggExprList(maybeResolved, agg, aggregateExpressions)
           }
-        } catch {
-          // Attempting to resolve in the aggregate can result in ambiguity.  
When this happens,
-          // just return the original plan.
-          case ae: AnalysisException => sort
         }
+      }
+      (aggregateExpressions.toSeq, transformed)
     }
 
-    def hasCharVarchar(expr: Alias): Boolean = {
-      expr.find {
-        case ne: NamedExpression => 
CharVarcharUtils.getRawType(ne.metadata).nonEmpty
-        case _ => false
-      }.nonEmpty
+    private def buildAggExprList(
+        expr: Expression,
+        agg: Aggregate,
+        aggExprList: ArrayBuffer[NamedExpression]): Expression = expr match {
+      case ae: AggregateExpression if ae.resolved =>
+        val alias = Alias(ae, ae.toString)()
+        aggExprList += alias
+        alias.toAttribute
+      // Grouping functions are handled in the rule 
[[ResolveGroupingAnalytics]].
+      case grouping: Expression if grouping.resolved &&
+          agg.groupingExpressions.exists(_.semanticEquals(grouping)) &&
+          !ResolveGroupingAnalytics.hasGroupingFunction(grouping) &&
+          !agg.output.exists(_.semanticEquals(grouping)) =>
+        grouping match {
+          case ne: NamedExpression =>
+            aggExprList += ne
+            ne.toAttribute
+          case _ =>
+            val alias = Alias(grouping, grouping.toString)()
+            aggExprList += alias
+            alias.toAttribute
+        }
+      case a: Attribute if agg.child.outputSet.contains(a) && 
!agg.outputSet.contains(a) =>
+        // Undo the resolution. This attribute is neither inside aggregate 
functions nor a
+        // grouping column. It shouldn't be resolved with `agg.child.output`.
+        CurrentOrigin.withOrigin(a.origin)(UnresolvedAttribute(Seq(a.name)))

Review comment:
       Some tests failed because the error message changed. This change restores the
previous error message, which is prettier.

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
##########
@@ -2378,10 +2375,6 @@ class Analyzer(override val catalogManager: 
CatalogManager)
      */
     def apply(plan: LogicalPlan): LogicalPlan = 
plan.resolveOperatorsUpWithPruning(
       _.containsPattern(PLAN_EXPRESSION), ruleId) {
-      // In case of HAVING (a filter after an aggregate) we use both the 
aggregate and
-      // its child for resolution.
-      case f @ Filter(_, a: Aggregate) if f.childrenResolved =>

Review comment:
       Good catch! Since it's an existing bug, let's fix it separately with a 
test.

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
##########
@@ -2457,164 +2450,136 @@ class Analyzer(override val catalogManager: 
CatalogManager)
       _.containsPattern(AGGREGATE), ruleId) {
       // Resolve aggregate with having clause to Filter(..., Aggregate()). 
Note, to avoid wrongly
       // resolve the having condition expression, here we skip resolving it in 
ResolveReferences
-      // and transform it to Filter after aggregate is resolved. See more 
details in SPARK-31519.
+      // and transform it to Filter after aggregate is resolved. Basically 
columns in HAVING should
+      // be resolved with `agg.child.output` first. See more details in 
SPARK-31519.
       case UnresolvedHaving(cond, agg: Aggregate) if agg.resolved =>
-        resolveHaving(Filter(cond, agg), agg)
-
-      case f @ Filter(_, agg: Aggregate) if agg.resolved =>
-        resolveHaving(f, agg)
-
-      case sort @ Sort(sortOrder, global, aggregate: Aggregate) if 
aggregate.resolved =>
-
-        // Try resolving the ordering as though it is in the aggregate clause.
-        try {
-          // If a sort order is unresolved, containing references not in 
aggregate, or containing
-          // `AggregateExpression`, we need to push down it to the underlying 
aggregate operator.
-          val unresolvedSortOrders = sortOrder.filter { s =>
-            !s.resolved || !s.references.subsetOf(aggregate.outputSet) || 
containsAggregate(s)
+        resolveOperatorWithAggregate(Seq(cond), agg, (newExprs, newChild) => {
+          Filter(newExprs.head, newChild)
+        })
+
+      case Filter(cond, agg: Aggregate) if agg.resolved =>
+        // We should resolve the references normally based on child.output 
first.
+        val maybeResolved = resolveExpressionByPlanOutput(cond, agg)
+        resolveOperatorWithAggregate(Seq(maybeResolved), agg, (newExprs, 
newChild) => {
+          Filter(newExprs.head, newChild)
+        })
+
+      case Sort(sortOrder, global, agg: Aggregate) if agg.resolved =>
+        // We should resolve the references normally based on child.output 
first.
+        val maybeResolved = 
sortOrder.map(_.child).map(resolveExpressionByPlanOutput(_, agg))
+        resolveOperatorWithAggregate(maybeResolved, agg, (newExprs, newChild) 
=> {
+          val newSortOrder = sortOrder.zip(newExprs).map {
+            case (sortOrder, expr) => sortOrder.copy(child = expr)
           }
-          val aliasedOrdering = unresolvedSortOrders.map(o => Alias(o.child, 
"aggOrder")())
-
-          val aggregateWithExtraOrdering = aggregate.copy(
-            aggregateExpressions = aggregate.aggregateExpressions ++ 
aliasedOrdering)
-
-          val resolvedAggregate: Aggregate =
-            
executeSameContext(aggregateWithExtraOrdering).asInstanceOf[Aggregate]
-
-          val (reResolvedAggExprs, resolvedAliasedOrdering) =
-            
resolvedAggregate.aggregateExpressions.splitAt(aggregate.aggregateExpressions.length)
-
-          // If we pass the analysis check, then the ordering expressions 
should only reference to
-          // aggregate expressions or grouping expressions, and it's safe to 
push them down to
-          // Aggregate.
-          checkAnalysis(resolvedAggregate)
-
-          val originalAggExprs = 
aggregate.aggregateExpressions.map(trimNonTopLevelAliases)
-
-          // If the ordering expression is same with original aggregate 
expression, we don't need
-          // to push down this ordering expression and can reference the 
original aggregate
-          // expression instead.
-          val needsPushDown = ArrayBuffer.empty[NamedExpression]
-          val orderToAlias = unresolvedSortOrders.zip(aliasedOrdering)
-          val evaluatedOrderings =
-            
resolvedAliasedOrdering.asInstanceOf[Seq[Alias]].zip(orderToAlias).map {
-              case (evaluated, (order, aliasOrder)) =>
-                val index = reResolvedAggExprs.indexWhere {
-                  case Alias(child, _) => child semanticEquals evaluated.child
-                  case other => other semanticEquals evaluated.child
-                }
+          Sort(newSortOrder, global, newChild)
+        })
+    }
 
-                if (index == -1) {
-                  if (hasCharVarchar(evaluated)) {
-                    needsPushDown += aliasOrder
-                    order.copy(child = aliasOrder)
-                  } else {
-                    needsPushDown += evaluated
-                    order.copy(child = evaluated.toAttribute)
-                  }
-                } else {
-                  order.copy(child = originalAggExprs(index).toAttribute)
-                }
+    /**
+     * Resolves the given expressions as if they are in the given Aggregate 
operator, which means
+     * the column can be resolved using `agg.child` and aggregate 
functions/grouping columns are
+     * allowed. It returns a list of named expressions that need to be 
appended to
+     * `agg.aggregateExpressions`, and the list of resolved expressions.
+     */
+    def resolveExprsWithAggregate(
+        exprs: Seq[Expression],
+        agg: Aggregate): (Seq[NamedExpression], Seq[Expression]) = {
+      val extraAggExprs = ArrayBuffer.empty[NamedExpression]
+      val transformed = exprs.map { e =>
+        // Try resolving the expression as though it is in the aggregate 
clause.
+        def resolveCol(input: Expression): Expression = {
+          input.transform {
+            case u: UnresolvedAttribute =>
+              try {
+                 agg.child.resolve(u.nameParts, resolver)
+                   .map(TempResolvedColumn(_, u.nameParts)).getOrElse(u)
+              } catch {
+                case _: AnalysisException => u
+              }
           }
-
-          val sortOrdersMap = unresolvedSortOrders
-            .map(new TreeNodeRef(_))
-            .zip(evaluatedOrderings)
-            .toMap
-          val finalSortOrders = sortOrder.map(s => sortOrdersMap.getOrElse(new 
TreeNodeRef(s), s))
-
-          // Since we don't rely on sort.resolved as the stop condition for 
this rule,
-          // we need to check this and prevent applying this rule multiple 
times
-          if (sortOrder == finalSortOrders) {
-            sort
+        }
+        def resolveSubQuery(input: Expression): Expression = {
+          if (SubqueryExpression.hasSubquery(input)) {
+            val fake = Project(Alias(input, "fake")() :: Nil, agg.child)
+            
ResolveSubquery(fake).asInstanceOf[Project].projectList.head.asInstanceOf[Alias].child
           } else {
-            Project(aggregate.output,
-              Sort(finalSortOrders, global,
-                aggregate.copy(aggregateExpressions = originalAggExprs ++ 
needsPushDown)))
+            input
           }
-        } catch {
-          // Attempting to resolve in the aggregate can result in ambiguity.  
When this happens,
-          // just return the original plan.
-          case ae: AnalysisException => sort
         }
-    }
 
-    def hasCharVarchar(expr: Alias): Boolean = {
-      expr.find {
-        case ne: NamedExpression => 
CharVarcharUtils.getRawType(ne.metadata).nonEmpty
-        case _ => false
-      }.nonEmpty
+        val maybeResolved = resolveSubQuery(resolveCol(e))
+        if (!maybeResolved.resolved) {
+          maybeResolved
+        } else {
+          buildAggExprList(maybeResolved, agg, extraAggExprs)
+        }
+      }
+      (extraAggExprs.toSeq, transformed)
     }
 
-    def containsAggregate(condition: Expression): Boolean = {
-      condition.find(_.isInstanceOf[AggregateExpression]).isDefined
+    private def trimTempResolvedField(input: Expression): Expression = 
input.transform {
+      case t: TempResolvedColumn => t.child
     }
 
-    def resolveFilterCondInAggregate(
-        filterCond: Expression, agg: Aggregate): Option[(Seq[NamedExpression], 
Expression)] = {
-      try {
-        val aggregatedCondition =
-          Aggregate(
-            agg.groupingExpressions,
-            Alias(filterCond, "havingCondition")() :: Nil,
-            agg.child)
-        val resolvedOperator = executeSameContext(aggregatedCondition)
-        def resolvedAggregateFilter =
-          resolvedOperator
-            .asInstanceOf[Aggregate]
-            .aggregateExpressions.head
-
-        // If resolution was successful and we see the filter has an aggregate 
in it, add it to
-        // the original aggregate operator.
-        if (resolvedOperator.resolved) {
-          // Try to replace all aggregate expressions in the filter by an 
alias.
-          val aggregateExpressions = ArrayBuffer.empty[NamedExpression]
-          val transformedAggregateFilter = resolvedAggregateFilter.transform {
-            case ae: AggregateExpression =>
-              val alias = Alias(ae, ae.toString)()
-              aggregateExpressions += alias
+    private def buildAggExprList(
+        expr: Expression,
+        agg: Aggregate,
+        aggExprList: ArrayBuffer[NamedExpression]): Expression = {
+      // Avoid adding an extra aggregate expression if it's already present in
+      // `agg.aggregateExpressions`.
+      val index = agg.aggregateExpressions.indexWhere {

Review comment:
       Makes sense. The current code follows the previous one in 
https://github.com/apache/spark/pull/32470/files#diff-ed19f376a63eba52eea59ca71f3355d4495fad4fad4db9a3324aade0d4986a47L2502
   
   I'll do it in a follow-up as this is an improvement, and update TPCDS query 
golden files if needed.

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
##########
@@ -278,11 +278,6 @@ case class AttributeReference(
     case _ => false
   }
 
-  override def semanticEquals(other: Expression): Boolean = other match {

Review comment:
       will be done by https://github.com/apache/spark/pull/32885

##########
File path: sql/core/src/test/resources/sql-tests/results/explain-aqe.sql.out
##########
@@ -186,17 +185,13 @@ Input [2]: [key#x, max#x]
 Keys [1]: [key#x]
 Functions [1]: [max(val#x)]
 Aggregate Attributes [1]: [max(val#x)#x]
-Results [3]: [key#x, max(val#x)#x AS max(val)#x, max(val#x)#x AS max(val#x)#x]
+Results [2]: [key#x, max(val#x)#x AS max(val)#x]

Review comment:
       Removes a duplicated max function.

##########
File path: sql/core/src/test/resources/sql-tests/results/explain.sql.out
##########
@@ -188,15 +187,11 @@ Input [2]: [key#x, max#x]
 Keys [1]: [key#x]
 Functions [1]: [max(val#x)]
 Aggregate Attributes [1]: [max(val#x)#x]
-Results [3]: [key#x, max(val#x)#x AS max(val)#x, max(val#x)#x AS max(val#x)#x]
+Results [2]: [key#x, max(val#x)#x AS max(val)#x]

Review comment:
       ditto

##########
File path: 
sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14a.sf100/explain.txt
##########
@@ -536,38 +536,38 @@ Input [6]: [i_brand_id#54, i_class_id#55, 
i_category_id#56, sum#62, isEmpty#63,
 Keys [3]: [i_brand_id#54, i_class_id#55, i_category_id#56]
 Functions [2]: [sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ss_list_price#3 
as decimal(12,2)))), DecimalType(18,2), true)), count(1)]
 Aggregate Attributes [2]: 
[sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ss_list_price#3 as 
decimal(12,2)))), DecimalType(18,2), true))#66, count(1)#67]
-Results [6]: [i_brand_id#54, i_class_id#55, i_category_id#56, 
sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ss_list_price#3 as 
decimal(12,2)))), DecimalType(18,2), true))#66 AS sales#68, count(1)#67 AS 
number_sales#69, sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ss_list_price#3 
as decimal(12,2)))), DecimalType(18,2), true))#66 AS 
sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ss_list_price#3 as 
decimal(12,2)))), DecimalType(18,2), true))#70]
+Results [5]: [i_brand_id#54, i_class_id#55, i_category_id#56, 
sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ss_list_price#3 as 
decimal(12,2)))), DecimalType(18,2), true))#66 AS sales#68, count(1)#67 AS 
number_sales#69]

Review comment:
       Removes a duplicated sum function.

##########
File path: 
sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14a.sf100/explain.txt
##########
@@ -577,82 +577,82 @@ Input [1]: [ss_item_sk#47]
 Arguments: [ss_item_sk#47 ASC NULLS FIRST], false, 0
 
 (98) SortMergeJoin [codegen id : 91]
-Left keys [1]: [cs_item_sk#74]
+Left keys [1]: [cs_item_sk#73]
 Right keys [1]: [ss_item_sk#47]
 Join condition: None
 
 (99) ReusedExchange [Reuses operator id: 72]
-Output [1]: [d_date_sk#79]
+Output [1]: [d_date_sk#78]
 
 (100) BroadcastHashJoin [codegen id : 91]
-Left keys [1]: [cs_sold_date_sk#77]
-Right keys [1]: [d_date_sk#79]
+Left keys [1]: [cs_sold_date_sk#76]
+Right keys [1]: [d_date_sk#78]
 Join condition: None
 
 (101) Project [codegen id : 91]
-Output [3]: [cs_item_sk#74, cs_quantity#75, cs_list_price#76]
-Input [5]: [cs_item_sk#74, cs_quantity#75, cs_list_price#76, 
cs_sold_date_sk#77, d_date_sk#79]
+Output [3]: [cs_item_sk#73, cs_quantity#74, cs_list_price#75]
+Input [5]: [cs_item_sk#73, cs_quantity#74, cs_list_price#75, 
cs_sold_date_sk#76, d_date_sk#78]
 
 (102) ReusedExchange [Reuses operator id: 83]
-Output [4]: [i_item_sk#80, i_brand_id#81, i_class_id#82, i_category_id#83]
+Output [4]: [i_item_sk#79, i_brand_id#80, i_class_id#81, i_category_id#82]
 
 (103) BroadcastHashJoin [codegen id : 91]
-Left keys [1]: [cs_item_sk#74]
-Right keys [1]: [i_item_sk#80]
+Left keys [1]: [cs_item_sk#73]
+Right keys [1]: [i_item_sk#79]
 Join condition: None
 
 (104) Project [codegen id : 91]
-Output [5]: [cs_quantity#75, cs_list_price#76, i_brand_id#81, i_class_id#82, 
i_category_id#83]
-Input [7]: [cs_item_sk#74, cs_quantity#75, cs_list_price#76, i_item_sk#80, 
i_brand_id#81, i_class_id#82, i_category_id#83]
+Output [5]: [cs_quantity#74, cs_list_price#75, i_brand_id#80, i_class_id#81, 
i_category_id#82]
+Input [7]: [cs_item_sk#73, cs_quantity#74, cs_list_price#75, i_item_sk#79, 
i_brand_id#80, i_class_id#81, i_category_id#82]
 
 (105) HashAggregate [codegen id : 91]
-Input [5]: [cs_quantity#75, cs_list_price#76, i_brand_id#81, i_class_id#82, 
i_category_id#83]
-Keys [3]: [i_brand_id#81, i_class_id#82, i_category_id#83]
-Functions [2]: 
[partial_sum(CheckOverflow((promote_precision(cast(cast(cs_quantity#75 as 
decimal(10,0)) as decimal(12,2))) * promote_precision(cast(cs_list_price#76 as 
decimal(12,2)))), DecimalType(18,2), true)), partial_count(1)]
-Aggregate Attributes [3]: [sum#84, isEmpty#85, count#86]
-Results [6]: [i_brand_id#81, i_class_id#82, i_category_id#83, sum#87, 
isEmpty#88, count#89]
+Input [5]: [cs_quantity#74, cs_list_price#75, i_brand_id#80, i_class_id#81, 
i_category_id#82]
+Keys [3]: [i_brand_id#80, i_class_id#81, i_category_id#82]
+Functions [2]: 
[partial_sum(CheckOverflow((promote_precision(cast(cast(cs_quantity#74 as 
decimal(10,0)) as decimal(12,2))) * promote_precision(cast(cs_list_price#75 as 
decimal(12,2)))), DecimalType(18,2), true)), partial_count(1)]
+Aggregate Attributes [3]: [sum#83, isEmpty#84, count#85]
+Results [6]: [i_brand_id#80, i_class_id#81, i_category_id#82, sum#86, 
isEmpty#87, count#88]
 
 (106) Exchange
-Input [6]: [i_brand_id#81, i_class_id#82, i_category_id#83, sum#87, 
isEmpty#88, count#89]
-Arguments: hashpartitioning(i_brand_id#81, i_class_id#82, i_category_id#83, 
5), ENSURE_REQUIREMENTS, [id=#90]
+Input [6]: [i_brand_id#80, i_class_id#81, i_category_id#82, sum#86, 
isEmpty#87, count#88]
+Arguments: hashpartitioning(i_brand_id#80, i_class_id#81, i_category_id#82, 
5), ENSURE_REQUIREMENTS, [id=#89]
 
 (107) HashAggregate [codegen id : 92]
-Input [6]: [i_brand_id#81, i_class_id#82, i_category_id#83, sum#87, 
isEmpty#88, count#89]
-Keys [3]: [i_brand_id#81, i_class_id#82, i_category_id#83]
-Functions [2]: [sum(CheckOverflow((promote_precision(cast(cast(cs_quantity#75 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(cs_list_price#76 
as decimal(12,2)))), DecimalType(18,2), true)), count(1)]
-Aggregate Attributes [2]: 
[sum(CheckOverflow((promote_precision(cast(cast(cs_quantity#75 as 
decimal(10,0)) as decimal(12,2))) * promote_precision(cast(cs_list_price#76 as 
decimal(12,2)))), DecimalType(18,2), true))#91, count(1)#92]
-Results [6]: [i_brand_id#81, i_class_id#82, i_category_id#83, 
sum(CheckOverflow((promote_precision(cast(cast(cs_quantity#75 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(cs_list_price#76 as 
decimal(12,2)))), DecimalType(18,2), true))#91 AS sales#93, count(1)#92 AS 
number_sales#94, sum(CheckOverflow((promote_precision(cast(cast(cs_quantity#75 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(cs_list_price#76 
as decimal(12,2)))), DecimalType(18,2), true))#91 AS 
sum(CheckOverflow((promote_precision(cast(cast(cs_quantity#75 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(cs_list_price#76 as 
decimal(12,2)))), DecimalType(18,2), true))#95]
+Input [6]: [i_brand_id#80, i_class_id#81, i_category_id#82, sum#86, 
isEmpty#87, count#88]
+Keys [3]: [i_brand_id#80, i_class_id#81, i_category_id#82]
+Functions [2]: [sum(CheckOverflow((promote_precision(cast(cast(cs_quantity#74 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(cs_list_price#75 
as decimal(12,2)))), DecimalType(18,2), true)), count(1)]
+Aggregate Attributes [2]: 
[sum(CheckOverflow((promote_precision(cast(cast(cs_quantity#74 as 
decimal(10,0)) as decimal(12,2))) * promote_precision(cast(cs_list_price#75 as 
decimal(12,2)))), DecimalType(18,2), true))#90, count(1)#91]
+Results [5]: [i_brand_id#80, i_class_id#81, i_category_id#82, 
sum(CheckOverflow((promote_precision(cast(cast(cs_quantity#74 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(cs_list_price#75 as 
decimal(12,2)))), DecimalType(18,2), true))#90 AS sales#92, count(1)#91 AS 
number_sales#93]

Review comment:
       ditto

##########
File path: 
sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14a.sf100/explain.txt
##########
@@ -662,91 +662,91 @@ Input [1]: [ss_item_sk#47]
 Arguments: [ss_item_sk#47 ASC NULLS FIRST], false, 0
 
 (117) SortMergeJoin [codegen id : 137]
-Left keys [1]: [ws_item_sk#97]
+Left keys [1]: [ws_item_sk#95]
 Right keys [1]: [ss_item_sk#47]
 Join condition: None
 
 (118) ReusedExchange [Reuses operator id: 72]
-Output [1]: [d_date_sk#102]
+Output [1]: [d_date_sk#100]
 
 (119) BroadcastHashJoin [codegen id : 137]
-Left keys [1]: [ws_sold_date_sk#100]
-Right keys [1]: [d_date_sk#102]
+Left keys [1]: [ws_sold_date_sk#98]
+Right keys [1]: [d_date_sk#100]
 Join condition: None
 
 (120) Project [codegen id : 137]
-Output [3]: [ws_item_sk#97, ws_quantity#98, ws_list_price#99]
-Input [5]: [ws_item_sk#97, ws_quantity#98, ws_list_price#99, 
ws_sold_date_sk#100, d_date_sk#102]
+Output [3]: [ws_item_sk#95, ws_quantity#96, ws_list_price#97]
+Input [5]: [ws_item_sk#95, ws_quantity#96, ws_list_price#97, 
ws_sold_date_sk#98, d_date_sk#100]
 
 (121) ReusedExchange [Reuses operator id: 83]
-Output [4]: [i_item_sk#103, i_brand_id#104, i_class_id#105, i_category_id#106]
+Output [4]: [i_item_sk#101, i_brand_id#102, i_class_id#103, i_category_id#104]
 
 (122) BroadcastHashJoin [codegen id : 137]
-Left keys [1]: [ws_item_sk#97]
-Right keys [1]: [i_item_sk#103]
+Left keys [1]: [ws_item_sk#95]
+Right keys [1]: [i_item_sk#101]
 Join condition: None
 
 (123) Project [codegen id : 137]
-Output [5]: [ws_quantity#98, ws_list_price#99, i_brand_id#104, i_class_id#105, 
i_category_id#106]
-Input [7]: [ws_item_sk#97, ws_quantity#98, ws_list_price#99, i_item_sk#103, 
i_brand_id#104, i_class_id#105, i_category_id#106]
+Output [5]: [ws_quantity#96, ws_list_price#97, i_brand_id#102, i_class_id#103, 
i_category_id#104]
+Input [7]: [ws_item_sk#95, ws_quantity#96, ws_list_price#97, i_item_sk#101, 
i_brand_id#102, i_class_id#103, i_category_id#104]
 
 (124) HashAggregate [codegen id : 137]
-Input [5]: [ws_quantity#98, ws_list_price#99, i_brand_id#104, i_class_id#105, 
i_category_id#106]
-Keys [3]: [i_brand_id#104, i_class_id#105, i_category_id#106]
-Functions [2]: 
[partial_sum(CheckOverflow((promote_precision(cast(cast(ws_quantity#98 as 
decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ws_list_price#99 as 
decimal(12,2)))), DecimalType(18,2), true)), partial_count(1)]
-Aggregate Attributes [3]: [sum#107, isEmpty#108, count#109]
-Results [6]: [i_brand_id#104, i_class_id#105, i_category_id#106, sum#110, 
isEmpty#111, count#112]
+Input [5]: [ws_quantity#96, ws_list_price#97, i_brand_id#102, i_class_id#103, 
i_category_id#104]
+Keys [3]: [i_brand_id#102, i_class_id#103, i_category_id#104]
+Functions [2]: 
[partial_sum(CheckOverflow((promote_precision(cast(cast(ws_quantity#96 as 
decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ws_list_price#97 as 
decimal(12,2)))), DecimalType(18,2), true)), partial_count(1)]
+Aggregate Attributes [3]: [sum#105, isEmpty#106, count#107]
+Results [6]: [i_brand_id#102, i_class_id#103, i_category_id#104, sum#108, 
isEmpty#109, count#110]
 
 (125) Exchange
-Input [6]: [i_brand_id#104, i_class_id#105, i_category_id#106, sum#110, 
isEmpty#111, count#112]
-Arguments: hashpartitioning(i_brand_id#104, i_class_id#105, i_category_id#106, 
5), ENSURE_REQUIREMENTS, [id=#113]
+Input [6]: [i_brand_id#102, i_class_id#103, i_category_id#104, sum#108, 
isEmpty#109, count#110]
+Arguments: hashpartitioning(i_brand_id#102, i_class_id#103, i_category_id#104, 
5), ENSURE_REQUIREMENTS, [id=#111]
 
 (126) HashAggregate [codegen id : 138]
-Input [6]: [i_brand_id#104, i_class_id#105, i_category_id#106, sum#110, 
isEmpty#111, count#112]
-Keys [3]: [i_brand_id#104, i_class_id#105, i_category_id#106]
-Functions [2]: [sum(CheckOverflow((promote_precision(cast(cast(ws_quantity#98 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ws_list_price#99 
as decimal(12,2)))), DecimalType(18,2), true)), count(1)]
-Aggregate Attributes [2]: 
[sum(CheckOverflow((promote_precision(cast(cast(ws_quantity#98 as 
decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ws_list_price#99 as 
decimal(12,2)))), DecimalType(18,2), true))#114, count(1)#115]
-Results [6]: [i_brand_id#104, i_class_id#105, i_category_id#106, 
sum(CheckOverflow((promote_precision(cast(cast(ws_quantity#98 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ws_list_price#99 as 
decimal(12,2)))), DecimalType(18,2), true))#114 AS sales#116, count(1)#115 AS 
number_sales#117, sum(CheckOverflow((promote_precision(cast(cast(ws_quantity#98 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ws_list_price#99 
as decimal(12,2)))), DecimalType(18,2), true))#114 AS 
sum(CheckOverflow((promote_precision(cast(cast(ws_quantity#98 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ws_list_price#99 as 
decimal(12,2)))), DecimalType(18,2), true))#118]
+Input [6]: [i_brand_id#102, i_class_id#103, i_category_id#104, sum#108, 
isEmpty#109, count#110]
+Keys [3]: [i_brand_id#102, i_class_id#103, i_category_id#104]
+Functions [2]: [sum(CheckOverflow((promote_precision(cast(cast(ws_quantity#96 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ws_list_price#97 
as decimal(12,2)))), DecimalType(18,2), true)), count(1)]
+Aggregate Attributes [2]: 
[sum(CheckOverflow((promote_precision(cast(cast(ws_quantity#96 as 
decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ws_list_price#97 as 
decimal(12,2)))), DecimalType(18,2), true))#112, count(1)#113]
+Results [5]: [i_brand_id#102, i_class_id#103, i_category_id#104, 
sum(CheckOverflow((promote_precision(cast(cast(ws_quantity#96 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ws_list_price#97 as 
decimal(12,2)))), DecimalType(18,2), true))#112 AS sales#114, count(1)#113 AS 
number_sales#115]

Review comment:
       ditto

##########
File path: 
sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14a/explain.txt
##########
@@ -465,193 +465,193 @@ Input [6]: [i_brand_id#46, i_class_id#47, 
i_category_id#48, sum#57, isEmpty#58,
 Keys [3]: [i_brand_id#46, i_class_id#47, i_category_id#48]
 Functions [2]: [sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ss_list_price#3 
as decimal(12,2)))), DecimalType(18,2), true)), count(1)]
 Aggregate Attributes [2]: 
[sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ss_list_price#3 as 
decimal(12,2)))), DecimalType(18,2), true))#61, count(1)#62]
-Results [6]: [i_brand_id#46, i_class_id#47, i_category_id#48, 
sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ss_list_price#3 as 
decimal(12,2)))), DecimalType(18,2), true))#61 AS sales#63, count(1)#62 AS 
number_sales#64, sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ss_list_price#3 
as decimal(12,2)))), DecimalType(18,2), true))#61 AS 
sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ss_list_price#3 as 
decimal(12,2)))), DecimalType(18,2), true))#65]
+Results [5]: [i_brand_id#46, i_class_id#47, i_category_id#48, 
sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ss_list_price#3 as 
decimal(12,2)))), DecimalType(18,2), true))#61 AS sales#63, count(1)#62 AS 
number_sales#64]

Review comment:
       ditto

##########
File path: 
sql/core/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14b.sf100/explain.txt
##########
@@ -518,352 +516,344 @@ Input [6]: [i_brand_id#55, i_class_id#56, 
i_category_id#57, sum#63, isEmpty#64,
 Keys [3]: [i_brand_id#55, i_class_id#56, i_category_id#57]
 Functions [2]: [sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ss_list_price#3 
as decimal(12,2)))), DecimalType(18,2), true)), count(1)]
 Aggregate Attributes [2]: 
[sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ss_list_price#3 as 
decimal(12,2)))), DecimalType(18,2), true))#67, count(1)#68]
-Results [6]: [i_brand_id#55, i_class_id#56, i_category_id#57, 
sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ss_list_price#3 as 
decimal(12,2)))), DecimalType(18,2), true))#67 AS sales#69, count(1)#68 AS 
number_sales#70, sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ss_list_price#3 
as decimal(12,2)))), DecimalType(18,2), true))#67 AS 
sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 as decimal(10,0)) 
as decimal(12,2))) * promote_precision(cast(ss_list_price#3 as 
decimal(12,2)))), DecimalType(18,2), true))#71]
+Results [6]: [store AS channel#69, i_brand_id#55, i_class_id#56, 
i_category_id#57, sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ss_list_price#3 
as decimal(12,2)))), DecimalType(18,2), true))#67 AS sales#70, count(1)#68 AS 
number_sales#71]
 
 (89) Filter [codegen id : 92]
-Input [6]: [i_brand_id#55, i_class_id#56, i_category_id#57, sales#69, 
number_sales#70, sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 
as decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ss_list_price#3 
as decimal(12,2)))), DecimalType(18,2), true))#71]
-Condition : 
(isnotnull(sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 as 
decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ss_list_price#3 as 
decimal(12,2)))), DecimalType(18,2), true))#71) AND 
(cast(sum(CheckOverflow((promote_precision(cast(cast(ss_quantity#2 as 
decimal(10,0)) as decimal(12,2))) * promote_precision(cast(ss_list_price#3 as 
decimal(12,2)))), DecimalType(18,2), true))#71 as decimal(32,6)) > 
cast(Subquery scalar-subquery#72, [id=#73] as decimal(32,6))))
+Input [6]: [channel#69, i_brand_id#55, i_class_id#56, i_category_id#57, 
sales#70, number_sales#71]
+Condition : (isnotnull(sales#70) AND (cast(sales#70 as decimal(32,6)) > 
cast(Subquery scalar-subquery#72, [id=#73] as decimal(32,6))))
 
-(90) Project [codegen id : 92]
-Output [6]: [store AS channel#74, i_brand_id#55, i_class_id#56, 
i_category_id#57, sales#69, number_sales#70]

Review comment:
       The `store AS channel#74` alias is now produced directly by the aggregate node, so the separate Project operator is no 
longer needed.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to