cloud-fan commented on code in PR #44532:
URL: https://github.com/apache/spark/pull/44532#discussion_r1445668556
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ColumnResolutionHelper.scala:
##########
@@ -487,42 +487,81 @@ trait ColumnResolutionHelper extends Logging with DataTypeErrorsBase {
// original expression as it is.
private def tryResolveColumnByPlanId(
e: Expression,
- q: LogicalPlan,
- idToPlan: mutable.HashMap[Long, LogicalPlan] = mutable.HashMap.empty): Expression = e match {
+ q: Seq[LogicalPlan]): Expression = e match {
case u: UnresolvedAttribute =>
- resolveUnresolvedAttributeByPlanId(
- u, q, idToPlan: mutable.HashMap[Long, LogicalPlan]
- ).getOrElse(u)
+ resolveUnresolvedAttributeByPlanId(u, q).getOrElse(u)
case _ if e.containsPattern(UNRESOLVED_ATTRIBUTE) =>
- e.mapChildren(c => tryResolveColumnByPlanId(c, q, idToPlan))
+ e.mapChildren(c => tryResolveColumnByPlanId(c, q))
case _ => e
}
private def resolveUnresolvedAttributeByPlanId(
u: UnresolvedAttribute,
- q: LogicalPlan,
- idToPlan: mutable.HashMap[Long, LogicalPlan]): Option[NamedExpression] = {
+ q: Seq[LogicalPlan]): Option[NamedExpression] = {
val planIdOpt = u.getTagValue(LogicalPlan.PLAN_ID_TAG)
if (planIdOpt.isEmpty) return None
val planId = planIdOpt.get
logDebug(s"Extract plan_id $planId from $u")
- val plan = idToPlan.getOrElseUpdate(planId, {
- findPlanById(u, planId, q).getOrElse {
- // For example:
- // df1 = spark.createDataFrame([Row(a = 1, b = 2, c = 3)]])
- // df2 = spark.createDataFrame([Row(a = 1, b = 2)]])
- // df1.select(df2.a) <- illegal reference df2.a
- throw new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_3051",
- messageParameters = Map(
- "u" -> u.toString,
- "planId" -> planId.toString,
- "q" -> q.toString))
+ val isMetadataAccess = u.getTagValue(LogicalPlan.IS_METADATA_COL).nonEmpty
+ val (resolved, matched) = resolveByPlanId(u, planId, isMetadataAccess, q)
+
+ if (!matched) {
+ // Can not find the target plan node with plan id, e.g.
+ // df1 = spark.createDataFrame([Row(a = 1, b = 2, c = 3)])
+ // df2 = spark.createDataFrame([Row(a = 1, b = 2)])
+ // df1.select(df2.a) <- illegal reference df2.a
+ throw QueryCompilationErrors.cannotResolveColumn(u)
+ }
+ resolved
+ }
+
+ private def resolveByPlanId(
+ u: UnresolvedAttribute,
+ id: Long,
+ isMetadataAccess: Boolean,
+ q: Seq[LogicalPlan]): (Option[NamedExpression], Boolean) = {
+ q.iterator.map(resolveByPlanId(u, id, isMetadataAccess, _))
+ .foldLeft((Option.empty[NamedExpression], false)) {
+ case ((r1, m1), (r2, m2)) =>
+ if (r1.nonEmpty && r2.nonEmpty) {
+ throw QueryCompilationErrors.ambiguousColumnReferences(u)
+ }
+ (if (r1.nonEmpty) r1 else r2, m1 | m2)
}
- })
+ }
+
+ private def resolveByPlanId(
+ u: UnresolvedAttribute,
+ id: Long,
+ isMetadataAccess: Boolean,
+ p: LogicalPlan): (Option[NamedExpression], Boolean) = {
+ val (resolved, matched) = if (p.getTagValue(LogicalPlan.PLAN_ID_TAG).contains(id)) {
+ (resolveByPlan(u, p, isMetadataAccess), true)
+ } else {
+ resolveByPlanId(u, id, isMetadataAccess, p.children)
+ }
- val isMetadataAccess = u.getTagValue(LogicalPlan.IS_METADATA_COL).isDefined
+ // Even with the target plan node, resolveUnresolvedAttributeByPlanId still
+ // can not guarantee successfully resolving u:
+ // there are several rules supporting missing column resolution
+ // (e.g. ResolveReferencesInSort), but the resolved attribute may be filtered
+ // out by the output attribute set.
+ // In this case, fall back to column resolution without plan id.
+ val filtered = resolved.filter { r =>
+ if (isMetadataAccess) {
+ r.references.subsetOf(AttributeSet(p.output ++ p.metadataOutput))
+ } else {
+ r.references.subsetOf(p.outputSet)
+ }
+ }
+ (filtered, matched)
+ }
+
+ private def resolveByPlan(
Review Comment:
This function is called only in one place, we can inline it.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]