cloud-fan commented on code in PR #48407:
URL: https://github.com/apache/spark/pull/48407#discussion_r1914519467
##########
sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala:
##########
@@ -82,27 +82,47 @@ abstract class SparkStrategies extends
QueryPlanner[SparkPlan] {
*/
object SpecialLimits extends Strategy {
override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
+ // Match the serialization pattern for the case of Dataset.toJSON
// Call `planTakeOrdered` first which matches a larger plan.
- case ReturnAnswer(rootPlan) =>
planTakeOrdered(rootPlan).getOrElse(rootPlan match {
- // We should match the combination of limit and offset first, to get
the optimal physical
- // plan, instead of planning limit and offset separately.
- case LimitAndOffset(limit, offset, child) =>
- CollectLimitExec(limit = limit, child = planLater(child), offset =
offset)
- case OffsetAndLimit(offset, limit, child) =>
- // 'Offset a' then 'Limit b' is the same as 'Limit a + b' then
'Offset a'.
- CollectLimitExec(limit = offset + limit, child = planLater(child),
offset = offset)
- case Limit(IntegerLiteral(limit), child) =>
- CollectLimitExec(limit = limit, child = planLater(child))
- case logical.Offset(IntegerLiteral(offset), child) =>
- CollectLimitExec(child = planLater(child), offset = offset)
- case Tail(IntegerLiteral(limit), child) =>
- CollectTailExec(limit, planLater(child))
- case other => planLater(other)
- }) :: Nil
+ case ReturnAnswer(
+ SerializeFromObject(
+ serializer,
+ MapPartitions(
+ f,
Review Comment:
I'm not sure if it's safe to apply this special limit optimization for
`MapPartitions` with an arbitrary lambda function.
Are we able to implement `df.toJSON` with `Project` and the `to_json`
function?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]