This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new d7317b03e97 [SPARK-39178][CORE] SparkFatalException should show root cause when printing the error stack
d7317b03e97 is described below

commit d7317b03e975f8dc1a8c276dd0a931e00c478717
Author: Angerszhuuuu <angers....@gmail.com>
AuthorDate: Fri May 13 16:47:11 2022 +0300

    [SPARK-39178][CORE] SparkFatalException should show root cause when printing the error stack
    
    ### What changes were proposed in this pull request?
    A user hit a case where a broadcast job threw `SparkFatalException`, but the error stack did not show the root cause.
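    
    For illustration, here is a minimal standalone sketch (the class and object names below are made up for this example, not Spark code) of why forwarding the wrapped throwable to `Exception`'s constructor surfaces the root cause:
    ```
    object CauseDemo {
      // Wrapper that does not pass the throwable to Exception's constructor:
      // printStackTrace() shows only the wrapper, with no "Caused by:" section.
      final class OpaqueWrapper(val throwable: Throwable) extends Exception

      // Wrapper that forwards the throwable as the cause, like SparkFatalException
      // after this change: the root cause appears under "Caused by:".
      final class TransparentWrapper(val throwable: Throwable) extends Exception(throwable)

      def main(args: Array[String]): Unit = {
        val root = new OutOfMemoryError("Not enough memory to broadcast the table")
        new OpaqueWrapper(root).printStackTrace()       // root cause is lost
        new TransparentWrapper(root).printStackTrace()  // ends with "Caused by: java.lang.OutOfMemoryError: ..."
      }
    }
    ```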
    
    ### Why are the changes needed?
    Makes the exception clearer by exposing its root cause in the stack trace.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes. Users now see the root cause when an application throws `SparkFatalException`.
    
    ### How was this patch tested?
    Verified with a unit test:
    ```
      test("xxxx") {
        throw new SparkFatalException(
          new OutOfMemoryError("Not enough memory to build and broadcast the 
table to all " +
          "worker nodes. As a workaround, you can either disable broadcast by 
setting " +
          s"driver memory by setting ${SparkLauncher.DRIVER_MEMORY} to a higher 
value.")
          .initCause(null))
      }
    ```
    
    Before this PR:
    ```
    [info]   org.apache.spark.util.SparkFatalException:
    [info]   at org.apache.spark.SparkContextSuite.$anonfun$new$1(SparkContextSuite.scala:59)
    [info]   at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
    [info]   at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
    [info]   at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
    [info]   at org.scalatest.Transformer.apply(Transformer.scala:22)
    [info]   at org.scalatest.Transformer.apply(Transformer.scala:20)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:190)
    [info]   at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:203)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:188)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:200)
    [info]   at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:200)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:182)
    [info]   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:64)
    [info]   at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
    [info]   at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
    [info]   at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:64)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:233)
    [info]   at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
    [info]   at scala.collection.immutable.List.foreach(List.scala:431)
    [info]   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
    [info]   at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
    [info]   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:233)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:232)
    [info]   at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1563)
    [info]   at org.scalatest.Suite.run(Suite.scala:1112)
    ```
    
    After this PR:
    ```
    [info]   org.apache.spark.util.SparkFatalException: java.lang.OutOfMemoryError: Not enough memory to build and broadcast the table to all worker nodes. As a workaround, you can either disable broadcast by setting driver memory by setting spark.driver.memory to a higher value.
    [info]   at org.apache.spark.SparkContextSuite.$anonfun$new$1(SparkContextSuite.scala:59)
    [info]   at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
    [info]   at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
    [info]   at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
    [info]   at org.scalatest.Transformer.apply(Transformer.scala:22)
    [info]   at org.scalatest.Transformer.apply(Transformer.scala:20)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:190)
    [info]   at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:203)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:188)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:200)
    [info]   at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:200)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:182)
    [info]   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:64)
    [info]   at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
    [info]   at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
    [info]   at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:64)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:233)
    [info]   at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
    [info]   at scala.collection.immutable.List.foreach(List.scala:431)
    [info]   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
    [info]   at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
    [info]   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:233)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:232)
    [info]   at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1563)
    [info]   at org.scalatest.Suite.run(Suite.scala:1112)
    [info]   at org.scalatest.Suite.run$(Suite.scala:1094)
    [info]   at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1563)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:237)
    [info]   at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:237)
    [info]   at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:236)
    [info]   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:64)
    [info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
    [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
    [info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
    [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:64)
    [info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:318)
    [info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:513)
    [info]   at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413)
    [info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    [info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    [info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    [info]   at java.lang.Thread.run(Thread.java:748)
    [info]   Cause: java.lang.OutOfMemoryError: Not enough memory to build and broadcast the table to all worker nodes. As a workaround, you can either disable broadcast by setting driver memory by setting spark.driver.memory to a higher value.
    [info]   at org.apache.spark.SparkContextSuite.$anonfun$new$1(SparkContextSuite.scala:58)
    ```
    
    Closes #36539 from AngersZhuuuu/SPARK-39178.
    
    Authored-by: Angerszhuuuu <angers....@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/scala/org/apache/spark/util/SparkFatalException.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/util/SparkFatalException.scala b/core/src/main/scala/org/apache/spark/util/SparkFatalException.scala
index 1aa2009fa9b..d15d300ae3c 100644
--- a/core/src/main/scala/org/apache/spark/util/SparkFatalException.scala
+++ b/core/src/main/scala/org/apache/spark/util/SparkFatalException.scala
@@ -24,4 +24,5 @@ package org.apache.spark.util
  * which is run by using ThreadUtils.awaitResult. ThreadUtils.awaitResult will catch
  * it and re-throw the original exception/error.
  */
-private[spark] final class SparkFatalException(val throwable: Throwable) extends Exception
+private[spark] final class SparkFatalException(val throwable: Throwable)
+  extends Exception(throwable)
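
For context, a rough sketch of the wrap-and-unwrap pattern the scaladoc above describes (illustrative only: the object and method names are made up, and this is not the actual `ThreadUtils.awaitResult` implementation):
```
package org.apache.spark.util

// Hypothetical helper, shown only to illustrate the pattern; SparkFatalException
// is private[spark], so code like this lives inside Spark itself.
private[spark] object FatalUnwrapExample {
  // Run a block that may wrap a fatal error in SparkFatalException and
  // re-throw the original exception/error, as ThreadUtils.awaitResult does.
  def runAndUnwrap[T](doWork: => T): T =
    try doWork
    catch {
      case e: SparkFatalException => throw e.throwable
    }
}
```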


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
