rami-lv commented on PR #45589:
URL: https://github.com/apache/spark/pull/45589#issuecomment-2515366413

   I encountered a similar issue. I am using Spark `3.5.3`, meaning it already 
includes this commit. 
   I think this is caused by the `Coalesce` expression, but I am not sure why. 
   I would appreciate any pointers to make debugging easier. 
   ```
   2024/12/03 05:21:58 [stderr] : org.apache.spark.SparkException: 
[INTERNAL_ERROR] The Spark SQL phase optimization failed with an internal 
error. You hit a bug in Spark or the Spark plugins you use. Please, report this 
bug to the corresponding communities or vendors, and provide the full stack 
trace.
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.SparkException$.internalError(SparkException.scala:107)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution$.toInternalError(QueryExecution.scala:536)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:548)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:219)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:218)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:148)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:144)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution.assertOptimized(QueryExecution.scala:162)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:182)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:179)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution.simpleString(QueryExecution.scala:238)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$explainString(QueryExecution.scala:284)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution.explainString(QueryExecution.scala:252)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:117)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:201)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:108)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:66)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.Dataset.withAction(Dataset.scala:4321)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.Dataset.checkpoint(Dataset.scala:727)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.Dataset.checkpoint(Dataset.scala:690)
   2024/12/03 05:21:58 [stderr]         at 
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
   2024/12/03 05:21:58 [stderr]         at 
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
   2024/12/03 05:21:58 [stderr]         at 
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
   2024/12/03 05:21:58 [stderr]         at 
java.base/java.lang.reflect.Method.invoke(Method.java:569)
   2024/12/03 05:21:58 [stderr]         at 
py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
   2024/12/03 05:21:58 [stderr]         at 
py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:374)
   2024/12/03 05:21:58 [stderr]         at py4j.Gateway.invoke(Gateway.java:282)
   2024/12/03 05:21:58 [stderr]         at 
py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
   2024/12/03 05:21:58 [stderr]         at 
py4j.commands.CallCommand.execute(CallCommand.java:79)
   2024/12/03 05:21:58 [stderr]         at 
py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
   2024/12/03 05:21:58 [stderr]         at 
py4j.ClientServerConnection.run(ClientServerConnection.java:106)
   2024/12/03 05:21:58 [stderr]         at 
java.base/java.lang.Thread.run(Thread.java:840)
   2024/12/03 05:21:58 [stderr] Caused by: java.lang.AssertionError: assertion 
failed
   2024/12/03 05:21:58 [stderr]         at 
scala.Predef$.assert(Predef.scala:208)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.util.V2ExpressionBuilder.generateExpression(V2ExpressionBuilder.scala:143)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.util.V2ExpressionBuilder.$anonfun$generateExpressionWithNameByChildren$1(V2ExpressionBuilder.scala:359)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.TraversableLike.$anonfun$flatMap$1(TraversableLike.scala:293)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.TraversableLike.flatMap(TraversableLike.scala:293)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.TraversableLike.flatMap$(TraversableLike.scala:290)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.AbstractTraversable.flatMap(Traversable.scala:108)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.util.V2ExpressionBuilder.generateExpressionWithNameByChildren(V2ExpressionBuilder.scala:359)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.util.V2ExpressionBuilder.generateExpressionWithName(V2ExpressionBuilder.scala:351)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.util.V2ExpressionBuilder.generateExpression(V2ExpressionBuilder.scala:100)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.util.V2ExpressionBuilder.build(V2ExpressionBuilder.scala:33)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.datasources.v2.PushablePredicate$.unapply(DataSourceV2Strategy.scala:663)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.datasources.v2.DataSourceV2Strategy$.translateLeafNodeFilterV2(DataSourceV2Strategy.scala:557)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.datasources.v2.DataSourceV2Strategy$.translateFilterV2WithMapping(DataSourceV2Strategy.scala:610)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.datasources.v2.DataSourceV2Strategy$.translateFilterV2WithMapping(DataSourceV2Strategy.scala:607)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.datasources.v2.PushDownUtils$.$anonfun$pushFilters$3(PushDownUtils.scala:88)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.immutable.List.foreach(List.scala:431)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.datasources.v2.PushDownUtils$.pushFilters(PushDownUtils.scala:85)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.datasources.v2.V2ScanRelationPushDown$$anonfun$pushDownFilters$1.applyOrElse(V2ScanRelationPushDown.scala:74)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.datasources.v2.V2ScanRelationPushDown$$anonfun$pushDownFilters$1.applyOrElse(V2ScanRelationPushDown.scala:61)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:461)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(origin.scala:76)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:461)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:32)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$3(TreeNode.scala:466)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1216)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1215)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.Project.mapChildren(basicLogicalOperators.scala:71)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:466)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:32)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$3(TreeNode.scala:466)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.BinaryLike.mapChildren(TreeNode.scala:1242)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.BinaryLike.mapChildren$(TreeNode.scala:1241)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.Join.mapChildren(basicLogicalOperators.scala:543)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:466)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:32)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$3(TreeNode.scala:466)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1216)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1215)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.Project.mapChildren(basicLogicalOperators.scala:71)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:466)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:32)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:437)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:405)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.datasources.v2.V2ScanRelationPushDown$.pushDownFilters(V2ScanRelationPushDown.scala:61)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.datasources.v2.V2ScanRelationPushDown$.$anonfun$apply$3(V2ScanRelationPushDown.scala:45)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.datasources.v2.V2ScanRelationPushDown$.$anonfun$apply$8(V2ScanRelationPushDown.scala:52)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.immutable.List.foldLeft(List.scala:91)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.datasources.v2.V2ScanRelationPushDown$.apply(V2ScanRelationPushDown.scala:51)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.datasources.v2.V2ScanRelationPushDown$.apply(V2ScanRelationPushDown.scala:38)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:222)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.immutable.List.foldLeft(List.scala:91)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:219)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:211)
   2024/12/03 05:21:58 [stderr]         at 
scala.collection.immutable.List.foreach(List.scala:431)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:211)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:182)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:89)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:182)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution.$anonfun$optimizedPlan$1(QueryExecution.scala:152)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:138)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:219)
   2024/12/03 05:21:58 [stderr]         at 
org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:546)
   
   ```


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to