seancxmao commented on a change in pull request #23885: [MINOR][DOCS][TEST] Remove Akka leftover
URL: https://github.com/apache/spark/pull/23885#discussion_r260102148
##########
File path: core/src/test/resources/spark-events/local-1422981780767
##########
@@ -58,7 +58,7 @@
{"Event":"SparkListenerTaskEnd","Stage ID":2,"Stage Attempt ID":0,"Task
Type":"ResultTask","Task End Reason":{"Reason":"Success"},"Task Info":{"Task
ID":22,"Index":6,"Attempt":0,"Launch Time":1422981786298,"Executor
ID":"<driver>","Host":"localhost","Locality":"PROCESS_LOCAL","Speculative":false,"Getting
Result Time":0,"Finish
Time":1422981786342,"Failed":false,"Accumulables":[]},"Task Metrics":{"Host
Name":"localhost","Executor Deserialize Time":2,"Executor Run Time":34,"Result
Size":862,"JVM GC Time":0,"Result Serialization Time":0,"Memory Bytes
Spilled":0,"Disk Bytes Spilled":0,"Shuffle Read Metrics":{"Remote Blocks
Fetched":0,"Local Blocks Fetched":8,"Fetch Wait Time":0,"Remote Bytes Read":0}}}
{"Event":"SparkListenerTaskEnd","Stage ID":2,"Stage Attempt ID":0,"Task
Type":"ResultTask","Task End Reason":{"Reason":"ExceptionFailure","Class
Name":"java.lang.RuntimeException","Description":"got a 3, failing","Stack
Trace":[{"Declaring
Class":"$line11.$read$$iwC$$iwC$$iwC$$iwC$$anonfun$1","Method
Name":"apply","File Name":"<console>","Line Number":18},{"Declaring
Class":"$line11.$read$$iwC$$iwC$$iwC$$iwC$$anonfun$1","Method
Name":"apply","File Name":"<console>","Line Number":17},{"Declaring
Class":"scala.collection.Iterator$$anon$11","Method Name":"next","File
Name":"Iterator.scala","Line Number":328},{"Declaring
Class":"org.apache.spark.util.Utils$","Method Name":"getIteratorSize","File
Name":"Utils.scala","Line Number":1311},{"Declaring
Class":"org.apache.spark.rdd.RDD$$anonfun$count$1","Method Name":"apply","File
Name":"RDD.scala","Line Number":910},{"Declaring
Class":"org.apache.spark.rdd.RDD$$anonfun$count$1","Method Name":"apply","File
Name":"RDD.scala","Line Number":910},{"Declaring
Class":"org.apache.spark.SparkContext$$anonfun$runJob$4","Method
Name":"apply","File Name":"SparkContext.scala","Line Number":1314},{"Declaring
Class":"org.apache.spark.SparkContext$$anonfun$runJob$4","Method
Name":"apply","File Name":"SparkContext.scala","Line Number":1314},{"Declaring
Class":"org.apache.spark.scheduler.ResultTask","Method Name":"runTask","File
Name":"ResultTask.scala","Line Number":61},{"Declaring
Class":"org.apache.spark.scheduler.Task","Method Name":"run","File
Name":"Task.scala","Line Number":56},{"Declaring
Class":"org.apache.spark.executor.Executor$TaskRunner","Method
Name":"run","File Name":"Executor.scala","Line Number":196},{"Declaring
Class":"java.util.concurrent.ThreadPoolExecutor","Method
Name":"runWorker","File Name":"ThreadPoolExecutor.java","Line
Number":1145},{"Declaring
Class":"java.util.concurrent.ThreadPoolExecutor$Worker","Method
Name":"run","File Name":"ThreadPoolExecutor.java","Line
Number":615},{"Declaring Class":"java.lang.Thread","Method Name":"run","File
Name":"Thread.java","Line Number":745}],"Full Stack
Trace":"java.lang.RuntimeException: got a 3, failing\n\tat
$line11.$read$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:18)\n\tat
$line11.$read$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:17)\n\tat
scala.collection.Iterator$$anon$11.next(Iterator.scala:328)\n\tat
org.apache.spark.util.Utils$.getIteratorSize(Utils.scala:1311)\n\tat
org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:910)\n\tat
org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:910)\n\tat
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)\n\tat
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)\n\tat
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)\n\tat
org.apache.spark.scheduler.Task.run(Task.scala:56)\n\tat
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)\n\tat
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)\n\tat
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)\n\tat
java.lang.Thread.run(Thread.java:745)\n","Metrics":{"Host
Name":"localhost","Executor Deserialize Time":0,"Executor Run Time":36,"Result
Size":0,"JVM GC Time":0,"Result Serialization Time":0,"Memory Bytes
Spilled":0,"Disk Bytes Spilled":0,"Shuffle Read Metrics":{"Remote Blocks
Fetched":0,"Local Blocks Fetched":8,"Fetch Wait Time":0,"Remote Bytes
Read":0}}},"Task Info":{"Task ID":19,"Index":3,"Attempt":0,"Launch
Time":1422981786297,"Executor
ID":"<driver>","Host":"localhost","Locality":"PROCESS_LOCAL","Speculative":false,"Getting
Result Time":0,"Finish
Time":1422981786343,"Failed":true,"Accumulables":[]},"Task Metrics":{"Host
Name":"localhost","Executor Deserialize Time":0,"Executor Run Time":36,"Result
Size":0,"JVM GC Time":0,"Result Serialization Time":0,"Memory Bytes
Spilled":0,"Disk Bytes Spilled":0,"Shuffle Read Metrics":{"Remote Blocks
Fetched":0,"Local Blocks Fetched":8,"Fetch Wait Time":0,"Remote Bytes Read":0}}}
{"Event":"SparkListenerStageCompleted","Stage Info":{"Stage ID":2,"Stage
Attempt ID":0,"Stage Name":"count at <console>:20","Number of Tasks":8,"RDD
Info":[{"RDD ID":3,"Name":"3","Storage Level":{"Use Disk":false,"Use
Memory":false,"Use Tachyon":false,"Deserialized":false,"Replication":1},"Number
of Partitions":8,"Number of Cached Partitions":0,"Memory Size":0,"Tachyon
Size":0,"Disk Size":0},{"RDD ID":2,"Name":"2","Storage Level":{"Use
Disk":false,"Use Memory":false,"Use
Tachyon":false,"Deserialized":false,"Replication":1},"Number of
Partitions":8,"Number of Cached Partitions":0,"Memory Size":0,"Tachyon
Size":0,"Disk
Size":0}],"Details":"org.apache.spark.rdd.RDD.count(RDD.scala:910)\n$line11.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:20)\n$line11.$read$$iwC$$iwC$$iwC.<init>(<console>:25)\n$line11.$read$$iwC$$iwC.<init>(<console>:27)\n$line11.$read$$iwC.<init>(<console>:29)\n$line11.$read.<init>(<console>:31)\n$line11.$read$.<init>(<console>:35)\n$line11.$read$.<clinit>(<console>)\n$line11.$eval$.<init>(<console>:7)\n$line11.$eval$.<clinit>(<console>)\n$line11.$eval.$print(<console>)\nsun.reflect.NativeMethodAccessorImpl.invoke0(Native
Method)\nsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)\nsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\njava.lang.reflect.Method.invoke(Method.java:606)\norg.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)\norg.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)\norg.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)\norg.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)","Submission
Time":1422981786296,"Completion Time":1422981786347,"Failure Reason":"Job
aborted due to stage failure: Task 3 in stage 2.0 failed 1 times, most recent
failure: Lost task 3.0 in stage 2.0 (TID 19, localhost):
java.lang.RuntimeException: got a 3, failing\n\tat
$line11.$read$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:18)\n\tat
$line11.$read$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:17)\n\tat
scala.collection.Iterator$$anon$11.next(Iterator.scala:328)\n\tat
org.apache.spark.util.Utils$.getIteratorSize(Utils.scala:1311)\n\tat
org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:910)\n\tat
org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:910)\n\tat
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)\n\tat
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)\n\tat
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)\n\tat
org.apache.spark.scheduler.Task.run(Task.scala:56)\n\tat
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)\n\tat
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)\n\tat
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)\n\tat
java.lang.Thread.run(Thread.java:745)\n\nDriver
stacktrace:","Accumulables":[]}}
-{"Event":"SparkListenerJobEnd","Job ID":1,"Job
Result":{"Result":"JobFailed","Exception":{"Message":"Job aborted due to stage
failure: Task 3 in stage 2.0 failed 1 times, most recent failure: Lost task 3.0
in stage 2.0 (TID 19, localhost): java.lang.RuntimeException: got a 3,
failing\n\tat
$line11.$read$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:18)\n\tat
$line11.$read$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:17)\n\tat
scala.collection.Iterator$$anon$11.next(Iterator.scala:328)\n\tat
org.apache.spark.util.Utils$.getIteratorSize(Utils.scala:1311)\n\tat
org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:910)\n\tat
org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:910)\n\tat
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)\n\tat
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)\n\tat
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)\n\tat
org.apache.spark.scheduler.Task.run(Task.scala:56)\n\tat
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)\n\tat
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)\n\tat
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)\n\tat
java.lang.Thread.run(Thread.java:745)\n\nDriver stacktrace:","Stack
Trace":[{"Declaring Class":"org.apache.spark.scheduler.DAGScheduler","Method
Name":"org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages","File
Name":"DAGScheduler.scala","Line Number":1214},{"Declaring
Class":"org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1","Method
Name":"apply","File Name":"DAGScheduler.scala","Line Number":1203},{"Declaring
Class":"org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1","Method
Name":"apply","File Name":"DAGScheduler.scala","Line Number":1202},{"Declaring
Class":"scala.collection.mutable.ResizableArray$class","Method
Name":"foreach","File Name":"ResizableArray.scala","Line
Number":59},{"Declaring Class":"scala.collection.mutable.ArrayBuffer","Method
Name":"foreach","File Name":"ArrayBuffer.scala","Line Number":47},{"Declaring
Class":"org.apache.spark.scheduler.DAGScheduler","Method
Name":"abortStage","File Name":"DAGScheduler.scala","Line
Number":1202},{"Declaring
Class":"org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1","Method
Name":"apply","File Name":"DAGScheduler.scala","Line Number":696},{"Declaring
Class":"org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1","Method
Name":"apply","File Name":"DAGScheduler.scala","Line Number":696},{"Declaring
Class":"scala.Option","Method Name":"foreach","File Name":"Option.scala","Line
Number":236},{"Declaring
Class":"org.apache.spark.scheduler.DAGScheduler","Method
Name":"handleTaskSetFailed","File Name":"DAGScheduler.scala","Line
Number":696},{"Declaring
Class":"org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2","Method
Name":"applyOrElse","File Name":"DAGScheduler.scala","Line
Number":1420},{"Declaring Class":"akka.actor.Actor$class","Method
Name":"aroundReceive","File Name":"Actor.scala","Line Number":465},{"Declaring
Class":"org.apache.spark.scheduler.DAGSchedulerEventProcessActor","Method
Name":"aroundReceive","File Name":"DAGScheduler.scala","Line
Number":1375},{"Declaring Class":"akka.actor.ActorCell","Method
Name":"receiveMessage","File Name":"ActorCell.scala","Line
Number":516},{"Declaring Class":"akka.actor.ActorCell","Method
Name":"invoke","File Name":"ActorCell.scala","Line Number":487},{"Declaring
Class":"akka.dispatch.Mailbox","Method Name":"processMailbox","File
Name":"Mailbox.scala","Line Number":238},{"Declaring
Class":"akka.dispatch.Mailbox","Method Name":"run","File
Name":"Mailbox.scala","Line Number":220},{"Declaring
Class":"akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask","Method
Name":"exec","File Name":"AbstractDispatcher.scala","Line
Number":393},{"Declaring
Class":"scala.concurrent.forkjoin.ForkJoinTask","Method Name":"doExec","File
Name":"ForkJoinTask.java","Line Number":260},{"Declaring
Class":"scala.concurrent.forkjoin.ForkJoinPool$WorkQueue","Method
Name":"runTask","File Name":"ForkJoinPool.java","Line Number":1339},{"Declaring
Class":"scala.concurrent.forkjoin.ForkJoinPool","Method Name":"runWorker","File
Name":"ForkJoinPool.java","Line Number":1979},{"Declaring
Class":"scala.concurrent.forkjoin.ForkJoinWorkerThread","Method
Name":"run","File Name":"ForkJoinWorkerThread.java","Line Number":107}]}}}
+{"Event":"SparkListenerJobEnd","Job ID":1,"Job
Result":{"Result":"JobFailed","Exception":{"Message":"Job aborted due to stage
failure: Task 3 in stage 2.0 failed 1 times, most recent failure: Lost task 3.0
in stage 2.0 (TID 19, localhost): java.lang.RuntimeException: got a 3,
failing\n\tat
$line11.$read$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:18)\n\tat
$line11.$read$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:17)\n\tat
scala.collection.Iterator$$anon$11.next(Iterator.scala:328)\n\tat
org.apache.spark.util.Utils$.getIteratorSize(Utils.scala:1311)\n\tat
org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:910)\n\tat
org.apache.spark.rdd.RDD$$anonfun$count$1.apply(RDD.scala:910)\n\tat
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)\n\tat
org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1314)\n\tat
org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)\n\tat
org.apache.spark.scheduler.Task.run(Task.scala:56)\n\tat
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:196)\n\tat
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)\n\tat
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)\n\tat
java.lang.Thread.run(Thread.java:745)\n\nDriver stacktrace:","Stack
Trace":[]}}}
Review comment:
It seems I need to do some homework to answer this question. Many thanks for
your tough and helpful questions!
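
As a side note for anyone hand-editing this fixture: the event log is JSON Lines, one SparkListener* event per line, so the easiest way to catch an accidental wrap, truncation, or unbalanced brace after an edit is to re-parse every line. A minimal sketch of such a check (illustrative only, not part of this PR; the script and its `validate_event_log` helper are hypothetical):

```python
import json
import sys

def validate_event_log(path):
    """Check that every non-empty line parses as JSON and carries an "Event" field."""
    ok = True
    with open(path, encoding="utf-8") as f:
        for lineno, line in enumerate(f, start=1):
            line = line.strip()
            if not line:
                continue
            try:
                event = json.loads(line)
            except json.JSONDecodeError as err:
                print(f"line {lineno}: invalid JSON ({err})")
                ok = False
                continue
            if "Event" not in event:
                print(f"line {lineno}: missing 'Event' field")
                ok = False
    return ok

if __name__ == "__main__":
    # e.g. python check_event_log.py core/src/test/resources/spark-events/local-1422981780767
    sys.exit(0 if validate_event_log(sys.argv[1]) else 1)
```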