[ https://issues.apache.org/jira/browse/SPARK-34967?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Hyukjin Kwon updated SPARK-34967:
---------------------------------
Description:
https://github.com/zulk666/SparkTestProject/blob/main/src/main/scala/FailStructWithWindow.scala
When I run this example code on Spark 3.0.2 or 2.4.7 I get the expected result, but on Spark 3.1.1 it throws an exception.
The query selects a field out of a struct after a window-function aggregation.
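For reference, here is a minimal sketch of the shape of the failing query; the dataset, column names, and window spec are illustrative stand-ins and are not copied from the linked FailStructWithWindow.scala.

{code}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions._

object FailStructWithWindowSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("FailStructWithWindowSketch")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._

    // Illustrative in-memory input (not the exact repro data); a small local
    // Seq becomes a LocalRelation, which is why ConvertToLocalRelation shows
    // up in the stack trace below.
    val df = Seq(("x", 1), ("x", 2), ("y", 3)).toDF("a", "b")

    // Aggregate over a window, wrap the result in a struct, then select a
    // field back out of that struct.
    val byA = Window.partitionBy($"a")
    val result = df
      .withColumn("s", struct(max($"b").over(byA).as("maxB"), $"b"))
      .select($"a", $"s".getField("maxB").as("maxB"))

    // The analogous show() in the linked repro succeeds on 2.4.7 / 3.0.2 and
    // fails on 3.1.1 with the exception below.
    result.show()

    spark.stop()
  }
}
{code}

The show() call at FailStructWithWindow.scala:24 in the linked repro fails on 3.1.1 with: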
{code}
Exception in thread "main" org.apache.spark.sql.catalyst.errors.package$TreeNodeException: Binding attribute, tree: _gen_alias_32#32
Exception in thread "main" org.apache.spark.sql.catalyst.errors.package$TreeNodeException: Binding attribute, tree: _gen_alias_32#32
  at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:56)
  at org.apache.spark.sql.catalyst.expressions.BindReferences$$anonfun$bindReference$1.applyOrElse(BoundAttribute.scala:75)
  at org.apache.spark.sql.catalyst.expressions.BindReferences$$anonfun$bindReference$1.applyOrElse(BoundAttribute.scala:74)
  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$1(TreeNode.scala:317)
  at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:73)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:317)
  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:322)
  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
  at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
  at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
  at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:322)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:306)
  at org.apache.spark.sql.catalyst.expressions.BindReferences$.bindReference(BoundAttribute.scala:74)
  at org.apache.spark.sql.catalyst.expressions.BindReferences$.$anonfun$bindReferences$1(BoundAttribute.scala:96)
  at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
  at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
  at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
  at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
  at scala.collection.TraversableLike.map(TraversableLike.scala:286)
  at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
  at scala.collection.AbstractTraversable.map(Traversable.scala:108)
  at org.apache.spark.sql.catalyst.expressions.BindReferences$.bindReferences(BoundAttribute.scala:96)
  at org.apache.spark.sql.catalyst.expressions.InterpretedMutableProjection.<init>(InterpretedMutableProjection.scala:35)
  at org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$$anonfun$apply$19.applyOrElse(Optimizer.scala:1589)
  at org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$$anonfun$apply$19.applyOrElse(Optimizer.scala:1586)
  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$1(TreeNode.scala:317)
  at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:73)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:317)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:171)
  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:169)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:322)
  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
  at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
  at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
  at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:322)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:171)
  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:169)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$3(TreeNode.scala:322)
  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$mapChildren$1(TreeNode.scala:407)
  at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:243)
  at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:405)
  at org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:358)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:322)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:171)
  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:169)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:306)
  at org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$.apply(Optimizer.scala:1586)
  at org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation$.apply(Optimizer.scala:1585)
  at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:216)
  at scala.collection.IndexedSeqOptimized.foldLeft(IndexedSeqOptimized.scala:60)
  at scala.collection.IndexedSeqOptimized.foldLeft$(IndexedSeqOptimized.scala:68)
  at scala.collection.mutable.WrappedArray.foldLeft(WrappedArray.scala:38)
  at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:213)
  at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:205)
  at scala.collection.immutable.List.foreach(List.scala:431)
  at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:205)
  at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:183)
  at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:88)
  at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:183)
  at org.apache.spark.sql.execution.QueryExecution.$anonfun$optimizedPlan$1(QueryExecution.scala:87)
  at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
  at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:143)
  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772)
  at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:143)
  at org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:84)
  at org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:84)
  at org.apache.spark.sql.execution.QueryExecution.assertOptimized(QueryExecution.scala:95)
  at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:113)
  at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:110)
  at org.apache.spark.sql.execution.QueryExecution.$anonfun$simpleString$2(QueryExecution.scala:161)
  at org.apache.spark.sql.execution.ExplainUtils$.processPlan(ExplainUtils.scala:115)
  at org.apache.spark.sql.execution.QueryExecution.simpleString(QueryExecution.scala:161)
  at org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$explainString(QueryExecution.scala:206)
  at org.apache.spark.sql.execution.QueryExecution.explainString(QueryExecution.scala:175)
  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:98)
  at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772)
  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
  at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3685)
  at org.apache.spark.sql.Dataset.head(Dataset.scala:2722)
  at org.apache.spark.sql.Dataset.take(Dataset.scala:2929)
  at org.apache.spark.sql.Dataset.getRows(Dataset.scala:301)
  at org.apache.spark.sql.Dataset.showString(Dataset.scala:338)
  at org.apache.spark.sql.Dataset.show(Dataset.scala:825)
  at org.apache.spark.sql.Dataset.show(Dataset.scala:784)
  at org.apache.spark.sql.Dataset.show(Dataset.scala:793)
  at com.zz.demo.FailStructWithWindow$.main(FailStructWithWindow.scala:24)
  at com.zz.demo.FailStructWithWindow.main(FailStructWithWindow.scala)
Caused by: java.lang.RuntimeException: Couldn't find _gen_alias_32#32 in [_w0#17,a#7,b#8]
  at scala.sys.package$.error(package.scala:30)
  at org.apache.spark.sql.catalyst.expressions.BindReferences$$anonfun$bindReference$1.$anonfun$applyOrElse$1(BoundAttribute.scala:81)
  at org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52)
  ... 99 more
21/04/06 14:45:33 INFO SparkContext: Invoking stop() from shutdown hook
21/04/06 14:45:33 INFO SparkUI: Stopped Spark web UI at http://WPU8L0066587.ad.ing.net:4040
21/04/06 14:45:33 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
21/04/06 14:45:33 INFO MemoryStore: MemoryStore cleared
21/04/06 14:45:33 INFO BlockManager: BlockManager stopped
21/04/06 14:45:33 INFO BlockManagerMaster: BlockManagerMaster stopped
21/04/06 14:45:33 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
21/04/06 14:45:33 INFO SparkContext: Successfully stopped SparkContext
21/04/06 14:45:33 INFO ShutdownHookManager: Shutdown hook called
21/04/06 14:45:33 INFO ShutdownHookManager: Deleting directory C:\Users\ug74ks\AppData\Local\Temp\spark-8072ac1a-b229-4b02-adea-d788b2b29980
Process finished with exit code 1
{code}
> Regression in spark 3.1.1 for window function and struct binding resolution
> ---------------------------------------------------------------------------
>
> Key: SPARK-34967
> URL: https://issues.apache.org/jira/browse/SPARK-34967
> Project: Spark
> Issue Type: Bug
> Components: SQL
> Affects Versions: 3.1.1
> Reporter: Lukasz Z
> Priority: Major
>
--
This message was sent by Atlassian Jira
(v8.3.4#803005)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]