cloud-fan commented on code in PR #36150:
URL: https://github.com/apache/spark/pull/36150#discussion_r923025241
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala:
##########
@@ -859,6 +864,36 @@ class Analyzer(override val catalogManager: CatalogManager)
}
}
+ object ResolveUnpivot extends Rule[LogicalPlan] {
+ def apply(plan: LogicalPlan): LogicalPlan =
plan.resolveOperatorsWithPruning(
+ _.containsPattern(UNPIVOT), ruleId) {
+
+ // once children and ids are resolved, we can determine values, if non
were given
+ case m: Unpivot if m.childrenResolved && m.ids.forall(_.resolved) &&
m.values.isEmpty =>
+ m.copy(values = m.child.output.diff(m.ids))
+
+ case m: Unpivot if !m.childrenResolved || !m.ids.forall(_.resolved)
+ || m.values.isEmpty || !m.values.forall(_.resolved) ||
m.valueType.isEmpty => m
+
+ // TypeCoercionBase.UnpivotCoercion determines valueType
+ // and casts values once values are set and resolved
+ case Unpivot(ids, values, variableColumnName, valueColumnName,
valueType, child) =>
Review Comment:
nit: I think we can merge the first case into this one:
```
val actualValues = if (values.isEmpty) {
  child.output.diff(ids)
} else {
  values
}
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]