cloud-fan commented on code in PR #53811:
URL: https://github.com/apache/spark/pull/53811#discussion_r2699397145
##########
sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/EvalSubqueriesForTimeTravel.scala:
##########
@@ -35,26 +34,15 @@ class EvalSubqueriesForTimeTravel extends Rule[LogicalPlan] {
// outer references and should not be correlated.
assert(!s.isCorrelated, "Correlated subquery should not appear in " +
classOf[EvalSubqueriesForTimeTravel].getSimpleName)
- SimpleAnalyzer.checkSubqueryExpression(r, s)
- val executedPlan = QueryExecution.prepareExecutedPlan(SparkSession.active, s.plan)
- val physicalSubquery = ScalarSubqueryExec(
- SubqueryExec.createForScalarSubquery(
- s"scalar-subquery#${s.exprId.id}", executedPlan),
- s.exprId)
- evalSubqueries(physicalSubquery)
- Literal(physicalSubquery.eval(), s.dataType)
+ // Wrap the scalar subquery in a Project over OneRowRelation to execute it
+ // through the normal query execution path. This properly handles table
+ // references in the subquery (e.g., V2 tables).
+ val wrappedPlan = Project(Seq(Alias(s, "result")()), OneRowRelation())
+ val spark = SparkSession.active
+ val qe = spark.sessionState.executePlan(wrappedPlan)
+ val result = qe.executedPlan.executeCollect().head.get(0, s.dataType)
+ Literal(result, s.dataType)
Review Comment:
Cast is explicitly handled in `TimeTravelSpec.create`, not relying on the
type coercion framework.
We also have a test for it: the time travel test in
`DataSourceV2SQLSuite.scala` uses a string as the timestamp.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]