acvictor commented on PR #11002:
URL: 
https://github.com/apache/incubator-gluten/pull/11002#issuecomment-3479349119

   > > I think SparkContext.getOrCreate is being called during cleanup 
(triggered by YARN in our case).
   > 
   > Could you provide the full error stack trace?
   
   This is the full stack trace I have:
   ```
   java.lang.IllegalStateException: Shutdown hooks cannot be modified during 
shutdown.
       at 
org.apache.spark.util.SparkShutdownHookManager.add(ShutdownHookManager.scala:195)
       at 
org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:153)
       at 
org.apache.spark.util.SparkShutdownManagerUtil$.addHookForTempDirRemoval(SparkShutdownManagerUtil.scala:29)
       at 
org.apache.spark.util.SparkDirectoryUtil.$anonfun$ROOTS$1(SparkDirectoryUtil.scala:40)
       at 
scala.collection.TraversableLike.$anonfun$flatMap$1(TraversableLike.scala:293)
       at 
scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
       at 
scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
       at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
       at scala.collection.TraversableLike.flatMap(TraversableLike.scala:293)
       at scala.collection.TraversableLike.flatMap$(TraversableLike.scala:290)
       at scala.collection.mutable.ArrayOps$ofRef.flatMap(ArrayOps.scala:198)
       at 
org.apache.spark.util.SparkDirectoryUtil.<init>(SparkDirectoryUtil.scala:35)
       at 
org.apache.spark.util.SparkDirectoryUtil$.init(SparkDirectoryUtil.scala:79)
       at 
org.apache.spark.util.SparkDirectoryUtil$.init(SparkDirectoryUtil.scala:74)
       at 
org.apache.gluten.backendsapi.velox.VeloxListenerApi.onDriverStart(VeloxListenerApi.scala:95)
       at 
org.apache.gluten.backendsapi.SubstraitBackend.onDriverStart(SubstraitBackend.scala:27)
       at 
org.apache.gluten.backendsapi.SubstraitBackend.onDriverStart$(SubstraitBackend.scala:26)
       at 
org.apache.gluten.backendsapi.velox.VeloxBackend.onDriverStart(VeloxBackend.scala:62)
       at 
org.apache.gluten.GlutenDriverPlugin.$anonfun$init$1(GlutenPlugin.scala:72)
       at 
org.apache.gluten.GlutenDriverPlugin.$anonfun$init$1$adapted(GlutenPlugin.scala:72)
       at 
scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
       at 
scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
       at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
       at org.apache.gluten.GlutenDriverPlugin.init(GlutenPlugin.scala:72)
       at 
org.apache.spark.internal.plugin.DriverPluginContainer.$anonfun$driverPlugins$1(PluginContainer.scala:53)
       at 
scala.collection.TraversableLike.$anonfun$flatMap$1(TraversableLike.scala:293)
       at 
scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
       at 
scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
       at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
       at scala.collection.TraversableLike.flatMap(TraversableLike.scala:293)
       at scala.collection.TraversableLike.flatMap$(TraversableLike.scala:290)
       at scala.collection.AbstractTraversable.flatMap(Traversable.scala:108)
       at 
org.apache.spark.internal.plugin.DriverPluginContainer.<init>(PluginContainer.scala:46)
       at 
org.apache.spark.internal.plugin.PluginContainer$.apply(PluginContainer.scala:210)
       at 
org.apache.spark.internal.plugin.PluginContainer$.apply(PluginContainer.scala:193)
       at org.apache.spark.SparkContext.<init>(SparkContext.scala:638)
       at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:3207)
       at org.apache.spark.SparkContext.getOrCreate(SparkContext.scala)
       at org.apache.livy.rsc.driver.SparkEntries.sc(SparkEntries.java:53)
       at 
org.apache.livy.rsc.driver.SparkEntries.sparkSession(SparkEntries.java:72)
       at 
org.apache.livy.repl.AbstractSparkInterpreter.postStart(AbstractSparkInterpreter.scala:144)
       at 
org.apache.livy.repl.SparkInterpreter.$anonfun$start$2(SparkInterpreter.scala:142)
       at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
       at 
org.apache.livy.repl.AbstractSparkInterpreter.restoreContextClassLoader(AbstractSparkInterpreter.scala:498)
       at 
org.apache.livy.repl.SparkInterpreter.start(SparkInterpreter.scala:109)
       at org.apache.livy.repl.Session.$anonfun$start$1(Session.scala:349)
       at scala.concurrent.Future$.$anonfun$apply$1(Future.scala:659)
       at scala.util.Success.$anonfun$map$1(Try.scala:255)
       at scala.util.Success.map(Try.scala:213)
       at scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
       at scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
       at scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
       at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
       at 
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
       at 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
       at java.base/java.lang.Thread.run(Thread.java:829)
   ```


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to