ajantha-bhat commented on a change in pull request #3794:
URL: https://github.com/apache/iceberg/pull/3794#discussion_r774314025
##########
File path: spark/v3.0/spark/src/main/scala/org/apache/spark/sql/execution/datasources/SparkExpressionConverter.scala
##########
@@ -30,4 +33,18 @@ object SparkExpressionConverter {
     // But these two conversions already exist and well tested. So, we are going with this approach.
     SparkFilters.convert(DataSourceStrategy.translateFilter(sparkExpression, supportNestedPredicatePushdown = true).get)
   }
+
+  @throws[AnalysisException]
+  def collectResolvedSparkExpression(session: SparkSession, tableName: String, where: String): Expression = {
+    var expression: Expression = null
+    // Add a dummy prefix linking to the table to collect the resolved spark expression from optimized plan.
+    val prefix = String.format("SELECT 42 from %s where ", tableName)
+    val logicalPlan = session.sessionState.sqlParser.parsePlan(prefix + where)
+    val optimizedLogicalPlan = session.sessionState.executePlan(logicalPlan).optimizedPlan
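
(Editor's note: the hunk above ends mid-method. For readability, here is a self-contained sketch of the technique it implements: wrap the raw WHERE string in a dummy query against the table, optimize the plan, and pull the resolved condition out of the resulting Filter node. The helper name, the Option return type, and the collectFirst step are illustrative assumptions, not code quoted from the PR.)

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.catalyst.expressions.Expression
    import org.apache.spark.sql.catalyst.plans.logical.Filter

    object WhereClauseSketch {
      // Hypothetical helper mirroring collectResolvedSparkExpression above.
      def resolveWhere(session: SparkSession, tableName: String, where: String): Option[Expression] = {
        // Dummy projection so the parser accepts a bare WHERE clause.
        val plan = session.sessionState.sqlParser.parsePlan(s"SELECT 42 FROM $tableName WHERE $where")
        // Optimization resolves column references against the actual table.
        val optimized = session.sessionState.executePlan(plan).optimizedPlan
        // The resolved predicate survives as the condition of a Filter node.
        optimized.collectFirst { case f: Filter => f.condition }
      }
    }
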
Review comment:
The only change compared to the spark-3.2 PR is this line: `executePlan` accepts only one argument in spark-3.0 and spark-3.1.
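
For context, a minimal sketch of the API difference being described. The two-argument call shape shown for spark-3.2 is an assumption based on the optional CommandExecutionMode parameter that Spark 3.2 added (SPARK-35378); it is not quoted from the spark-3.2 PR.

    // Spark 3.0 / 3.1: executePlan takes a single LogicalPlan argument.
    val optimizedLogicalPlan =
      session.sessionState.executePlan(logicalPlan).optimizedPlan

    // Spark 3.2 (assumed call shape, not quoted from the spark-3.2 PR):
    // import org.apache.spark.sql.execution.CommandExecutionMode
    // val optimizedLogicalPlan = session.sessionState
    //   .executePlan(logicalPlan, CommandExecutionMode.ALL)
    //   .optimizedPlan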