maryannxue commented on a change in pull request #23531: [SPARK-24497][SQL]
Support recursive SQL query
URL: https://github.com/apache/spark/pull/23531#discussion_r261737741
##########
File path:
sql/core/src/main/scala/org/apache/spark/sql/execution/basicPhysicalOperators.scala
##########
@@ -228,6 +233,97 @@ case class FilterExec(condition: Expression, child:
SparkPlan)
override def outputPartitioning: Partitioning = child.outputPartitioning
}
+/** Physical plan for RecursiveTable. */
+case class RecursiveTableExec(
+ name: String,
+ anchorTerms: Seq[SparkPlan],
+ recursiveTerms: Seq[SparkPlan],
+ limit: Option[Long]) extends SparkPlan with LoopStart {
+ override def children: Seq[SparkPlan] = anchorTerms ++ recursiveTerms
+
+ override def output: Seq[Attribute] =
anchorTerms.head.output.map(_.withNullability(true))
+
+ override val doNotPrepareInAdvance: Seq[SparkPlan] = recursiveTerms
+
+ override protected def doExecute(): RDD[InternalRow] = {
+ val prevIterationRDDs = ArrayBuffer.empty[RDD[InternalRow]]
+ var prevIterationCount = 0L
+ val anchorTermsIterator = anchorTerms.iterator
+ while (anchorTermsIterator.hasNext && limit.forall(_ >
prevIterationCount)) {
+ val anchorTerm = anchorTermsIterator.next()
+
+ val rdd = anchorTerm.execute().map(_.copy()).cache()
+ val count = rdd.count()
+ if (count > 0) {
+ prevIterationRDDs += rdd
+ prevIterationCount += count
+ }
+ }
+
+ val allRDDs = ArrayBuffer(prevIterationRDDs: _*)
+ var allCount = prevIterationCount
+ var level = 0
+ val levelLimit = conf.recursionLevelLimit
+ while (prevIterationCount > 0 && limit.forall(_ > allCount)) {
+ if (level > levelLimit) {
+ throw new SparkException(s"Recursion level limit ${levelLimit} reached
but query has not " +
+ s"exhausted, try increasing ${SQLConf.RECURSION_LEVEL_LIMIT.key}")
+ }
+
+ val prevIterationResult = sparkContext.union(prevIterationRDDs)
+
+ prevIterationRDDs.clear()
+ prevIterationCount = 0
+ val recursiveTermsIterator = recursiveTerms.iterator
+ while (recursiveTermsIterator.hasNext && limit.forall(_ > allCount +
prevIterationCount)) {
+ val recursiveTerm = recursiveTermsIterator.next()
+
+ val newCoordinators = mutable.Map.empty[ExchangeCoordinator,
Some[ExchangeCoordinator]]
+ val newRecursiveTerm = recursiveTerm.transform {
Review comment:
Also, if we do it that way, we won't need "doNotPrepareInAdvance" at all.
The downside of this change would be efficiency, e.g., the logical-to-physical
compilation overhead, or the recursion-independent subquery or broadcast
that could potentially be reused (which is not reused here anyway). But we may
implement such optimizations at the logical level too.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]