Github user cloud-fan commented on a diff in the pull request:
https://github.com/apache/spark/pull/11283#discussion_r54834856
--- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/SQLBuilder.scala ---
@@ -211,6 +215,73 @@ class SQLBuilder(logicalPlan: LogicalPlan, sqlContext: SQLContext) extends Loggi
)
}
+  private def groupingSetToSQL(
+      plan: Aggregate,
+      expand: Expand,
+      project: Project): String = {
+    require(plan.groupingExpressions.length > 1)
+
+    // The last column of Expand is always the grouping ID.
+    val gid = expand.output.last
+
+    // In cube/rollup/grouping sets, the Analyzer creates new aliases for all group-by
+    // expressions. Since the conversion from an attribute back to SQL ignores expression IDs,
+    // the aliases of attribute references are ignored in aliasMap.
+    val aliasMap = AttributeMap(project.projectList.collect {
+      case a @ Alias(child, name) => (a.toAttribute, a)
+    })
+
+    val groupByAttributes = plan.groupingExpressions.dropRight(1).map(_.asInstanceOf[Attribute])
+    val groupByAttrMap = AttributeMap(groupByAttributes.zip(
+      project.projectList.drop(project.child.output.length).map(_.asInstanceOf[Alias].child)))
+    val groupingExprs = groupByAttrMap.values.toArray
+    val groupingSQL = groupingExprs.map(_.sql).mkString(", ")
+
+    val groupingSet = expand.projections.map(_.dropRight(1).filter {
--- End diff ---
we can also simplify this:
```
val groupingAttrSet = AttributeSet(groupByAttributes)
val groupingSet = expand.projections.map { project =>
  project.dropRight(1).collect {
    case attr: Attribute if groupingAttrSet.contains(attr) => attr
  }.map(groupByAttrMap.get)
}
```
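One small caveat with the sketch above: `AttributeMap#get` returns an `Option`, so if the caller needs the underlying expressions (e.g. to call `.sql` on them) it may be cleaner to do the lookup inside the `collect`, something like `case attr: Attribute if groupingAttrSet.contains(attr) => groupByAttrMap(attr)`.

Also, for reference, `AttributeSet` membership is decided by expression ID rather than by name, which is why the `collect` keeps exactly the attributes that came from the group-by list. A minimal, hypothetical sketch (the attribute names below are made up for illustration):
```
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, AttributeSet}
import org.apache.spark.sql.types.IntegerType

// Two hypothetical grouping attributes.
val a = AttributeReference("a", IntegerType)()
val b = AttributeReference("b", IntegerType)()
val groupingAttrSet = AttributeSet(a :: b :: Nil)

groupingAttrSet.contains(a)                                        // true: same exprId
groupingAttrSet.contains(a.withName("a_renamed"))                  // still true: matched by exprId, not name
groupingAttrSet.contains(AttributeReference("a", IntegerType)())   // false: a fresh exprId
```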