hugw777 commented on issue #5331:
URL: https://github.com/apache/paimon/issues/5331#issuecomment-2747464989

   Spark SQL configuration:
   
   ```sql
   set spark.sql.optimizer.dynamicPartitionPruning.enabled=false;
   ```
   
   If I don't set the 
spark.sql.optimizer.dynamicPartitionPruning.enabled parameter to false, I get: 
Error in query: unresolved operator 'Filter dynamicpruning950'.
   
   If I set the 
spark.sql.optimizer.dynamicPartitionPruning.enabled parameter to false, a 
java.lang.StackOverflowError is thrown instead:
   
   ```
   java.lang.RuntimeException: java.lang.RuntimeException: 
java.util.concurrent.ExecutionException: java.lang.StackOverflowError
                at 
org.apache.paimon.spark.commands.PaimonSparkWriter.commit(PaimonSparkWriter.scala:285)
                at 
org.apache.paimon.spark.commands.WriteIntoPaimonTable.run(WriteIntoPaimonTable.scala:64)
                at 
org.apache.paimon.spark.commands.PaimonDynamicPartitionOverwriteCommand.run(PaimonDynamicPartitionOverwriteCommand.scala:69)
                at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
                at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
                at 
org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
                at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:98)
                at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:109)
                at 
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169)
                at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)
                at 
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
                at 
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
                at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:98)
                at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:94)
                at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:584)
                at 
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:176)
                at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:584)
                at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
                at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
                at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
                at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
                at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
                at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:560)
                at 
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:94)
                at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:81)
                at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:79)
                at org.apache.spark.sql.Dataset.<init>(Dataset.scala:220)
                at 
org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100)
                at 
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
                at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97)
                at 
org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:622)
                at 
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
                at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:617)
                at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:651)
                at 
org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:67)
                at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:384)
                at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1(SparkSQLCLIDriver.scala:504)
                at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.$anonfun$processLine$1$adapted(SparkSQLCLIDriver.scala:498)
                at scala.collection.Iterator.foreach(Iterator.scala:943)
                at scala.collection.Iterator.foreach$(Iterator.scala:943)
                at 
scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
                at scala.collection.IterableLike.foreach(IterableLike.scala:74)
                at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
                at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
                at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processLine(SparkSQLCLIDriver.scala:498)
                at 
org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:336)
                at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:207)
                at 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
                at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
                at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
                at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
                at java.lang.reflect.Method.invoke(Method.java:498)
                at 
org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
                at 
org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:958)
                at 
org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
                at 
org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
                at 
org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
                at 
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1046)
                at 
org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1055)
                at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
        Caused by: java.lang.RuntimeException: 
java.util.concurrent.ExecutionException: java.lang.StackOverflowError
                at 
org.apache.paimon.utils.ScanParallelExecutor$1.advanceIfNeeded(ScanParallelExecutor.java:85)
                at 
org.apache.paimon.utils.ScanParallelExecutor$1.hasNext(ScanParallelExecutor.java:60)
                at 
org.apache.paimon.manifest.FileEntry.mergeEntries(FileEntry.java:126)
                at 
org.apache.paimon.manifest.FileEntry.mergeEntries(FileEntry.java:112)
                at 
org.apache.paimon.operation.AbstractFileStoreScan.readAndMergeFileEntries(AbstractFileStoreScan.java:395)
                at 
org.apache.paimon.operation.AbstractFileStoreScan.doPlan(AbstractFileStoreScan.java:299)
                at 
org.apache.paimon.operation.AbstractFileStoreScan.plan(AbstractFileStoreScan.java:223)
                at 
org.apache.paimon.operation.FileStoreCommitImpl.tryOverwrite(FileStoreCommitImpl.java:729)
                at 
org.apache.paimon.operation.FileStoreCommitImpl.overwrite(FileStoreCommitImpl.java:451)
                at 
org.apache.paimon.table.sink.TableCommitImpl.commitMultiple(TableCommitImpl.java:225)
                at 
org.apache.paimon.table.sink.TableCommitImpl.commit(TableCommitImpl.java:200)
                at 
org.apache.paimon.table.sink.TableCommitImpl.commit(TableCommitImpl.java:179)
                at 
org.apache.paimon.table.sink.TableCommitImpl.commit(TableCommitImpl.java:163)
                at 
org.apache.paimon.spark.commands.PaimonSparkWriter.commit(PaimonSparkWriter.scala:283)
                ... 59 more
        Caused by: java.util.concurrent.ExecutionException: 
java.lang.StackOverflowError
                at 
java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357)
                at 
java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1908)
                at 
org.apache.paimon.utils.ScanParallelExecutor$1.advanceIfNeeded(ScanParallelExecutor.java:83)
                ... 72 more
        Caused by: java.lang.StackOverflowError
                at 
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
                at 
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
                at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
                at 
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
                at 
java.util.concurrent.ForkJoinTask.getThrowableException(ForkJoinTask.java:598)
                at 
java.util.concurrent.ForkJoinTask.reportException(ForkJoinTask.java:677)
                at 
java.util.concurrent.ForkJoinTask.invoke(ForkJoinTask.java:735)
                at 
java.util.stream.ReduceOps$ReduceOp.evaluateParallel(ReduceOps.java:714)
                at 
java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:233)
                at 
java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:499)
                at 
org.apache.paimon.operation.AbstractFileStoreScan.lambda$readAndMergeFileEntries$6(AbstractFileStoreScan.java:386)
                at 
org.apache.paimon.utils.ScanParallelExecutor$1.lambda$advanceIfNeeded$0(ScanParallelExecutor.java:81)
                at 
java.util.concurrent.CompletableFuture$AsyncSupply.run(CompletableFuture.java:1604)
                at 
java.util.concurrent.CompletableFuture$AsyncSupply.exec(CompletableFuture.java:1596)
                at 
java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
                at 
java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1067)
                at 
java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1703)
                at 
java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:172)
        Caused by: java.lang.StackOverflowError
                at org.apache.paimon.predicate.Or.test(Or.java:54)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
                at 
org.apache.paimon.predicate.CompoundPredicate.test(CompoundPredicate.java:59)
                at org.apache.paimon.predicate.Or.test(Or.java:55)
   ```
   
   


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscr...@paimon.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

Reply via email to