Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/18570#discussion_r126280881
  
    --- Diff: 
sql/core/src/main/scala/org/apache/spark/sql/execution/stat/StatFunctions.scala 
---
    @@ -228,90 +229,71 @@ object StatFunctions extends Logging {
         val defaultStatistics = Seq("count", "mean", "stddev", "min", "25%", 
"50%", "75%", "max")
         val selectedStatistics = if (statistics.nonEmpty) statistics else 
defaultStatistics
     
    -    val hasPercentiles = selectedStatistics.exists(_.endsWith("%"))
    -    val (percentiles, percentileNames, remainingAggregates) = if 
(hasPercentiles) {
    -      val (pStrings, rest) = selectedStatistics.partition(a => 
a.endsWith("%"))
    -      val percentiles = pStrings.map { p =>
    -        try {
    -          p.stripSuffix("%").toDouble / 100.0
    -        } catch {
    -          case e: NumberFormatException =>
    -            throw new IllegalArgumentException(s"Unable to parse $p as a 
percentile", e)
    -        }
    +    val percentiles = selectedStatistics.filter(a => a.endsWith("%")).map 
{ p =>
    +      try {
    +        p.stripSuffix("%").toDouble / 100.0
    +      } catch {
    +        case e: NumberFormatException =>
    +          throw new IllegalArgumentException(s"Unable to parse $p as a 
percentile", e)
           }
    -      require(percentiles.forall(p => p >= 0 && p <= 1), "Percentiles must 
be in the range [0, 1]")
    -      (percentiles, pStrings, rest)
    -    } else {
    -      (Seq(), Seq(), selectedStatistics)
    -    }
    -
    -
    -    // The list of summary statistics to compute, in the form of 
expressions.
    -    val availableStatistics = Map[String, Expression => Expression](
    -      "count" -> ((child: Expression) => 
Count(child).toAggregateExpression()),
    -      "mean" -> ((child: Expression) => 
Average(child).toAggregateExpression()),
    -      "stddev" -> ((child: Expression) => 
StddevSamp(child).toAggregateExpression()),
    -      "min" -> ((child: Expression) => Min(child).toAggregateExpression()),
    -      "max" -> ((child: Expression) => Max(child).toAggregateExpression()))
    -
    -    val statisticFns = remainingAggregates.map { agg =>
    -      require(availableStatistics.contains(agg), s"$agg is not a 
recognised statistic")
    -      agg -> availableStatistics(agg)
         }
    +    require(percentiles.forall(p => p >= 0 && p <= 1), "Percentiles must 
be in the range [0, 1]")
     
    -    def percentileAgg(child: Expression): Expression =
    -      new ApproximatePercentile(child, 
CreateArray(percentiles.map(Literal(_))))
    -        .toAggregateExpression()
    -
    -    val outputCols = 
ds.aggregatableColumns.map(usePrettyExpression(_).sql).toList
    -
    -    val ret: Seq[Row] = if (outputCols.nonEmpty) {
    -      var aggExprs = statisticFns.toList.flatMap { case (_, colToAgg) =>
    -        outputCols.map(c => Column(Cast(colToAgg(Column(c).expr), 
StringType)).as(c))
    -      }
    -      if (hasPercentiles) {
    -        aggExprs = outputCols.map(c => 
Column(percentileAgg(Column(c).expr)).as(c)) ++ aggExprs
    +    var percentileIndex = 0
    +    val statisticFns = selectedStatistics.map { stats =>
    +      if (stats.endsWith("%")) {
    +        val index = percentileIndex
    +        percentileIndex += 1
    +        (child: Expression) =>
    +          GetArrayItem(
    +            new ApproximatePercentile(child, 
Literal.create(percentiles)).toAggregateExpression(),
    --- End diff --
    
    The aggregate operator in Spark SQL only executes duplicated aggregate 
expressions once, so it's ok to have duplicated `ApproximatePercentile` in 
aggregate expressions.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and you would like it to be, or if the feature is enabled but not
working, please contact infrastructure at [email protected] or file
a JIRA ticket with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to