Adding logs from the executors:

15/01/18 12:55:00 INFO NativeS3FileSystem: Opening 's3n://...../part-00000' for reading
15/01/18 12:55:00 ERROR Executor: Exception in task 0.2 in stage 0.0 (TID 2)
java.lang.ExceptionInInitializerError
        at $line49.$read$$iwC.<init>(<console>:6)
        at $line49.$read.<init>(<console>:35)
        at $line49.$read$.<init>(<console>:39)
        at $line49.$read$.<clinit>(<console>)
        at $line50.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$DailyStatsChartBuilder$.fromCsv(<console>:41)
        at $line53.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:43)
        at $line53.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:43)
        at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
        at scala.collection.Iterator$$anon$10.next(Iterator.scala:312)
        at scala.collection.Iterator$class.foreach(Iterator.scala:727)
        at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
        at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
        at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
        at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
        at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
        at scala.collection.AbstractIterator.to(Iterator.scala:1157)
        at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
        at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
        at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
        at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
        at org.apache.spark.rdd.RDD$$anonfun$28.apply(RDD.scala:1079)
        at org.apache.spark.rdd.RDD$$anonfun$28.apply(RDD.scala:1079)
        at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1143)
        at org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1143)
        at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:62)
        at org.apache.spark.scheduler.Task.run(Task.scala:54)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:178)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.NullPointerException
        at $line3.$read$$iwC$$iwC.<init>(<console>:8)
        at $line3.$read$$iwC.<init>(<console>:14)
        at $line3.$read.<init>(<console>:16)
        at $line3.$read$.<init>(<console>:20)
        at $line3.$read$.<clinit>(<console>)
        ... 30 more
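
For what it's worth, the root cause above looks like the part to focus on: the NullPointerException comes from $line3.$read$.<clinit>, the static initializer of a wrapper object the REPL generated for one of the first shell lines. spark-shell compiles each typed line into nested $iwC wrapper classes, and a case class defined at the prompt is an inner class of those wrappers, so when an executor deserializes the task closure it has to initialize the whole wrapper chain; a wrapper that touches driver-only state can fail there even though the same code runs in local mode, where the wrappers are already initialized in the driver JVM. A small probe like the sketch below (illustrative, not from the thread) can surface the underlying cause from an executor instead of the generic ExceptionInInitializerError:

    // Run one tiny task that forces TEST's REPL wrapper chain to
    // initialize on an executor and report the root cause.
    sc.parallelize(Seq(1), 1).map { _ =>
      try { TEST("probe"); "ok" }
      catch { case t: Throwable => "init failed, cause: " + t.getCause }
    }.collect().foreach(println)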



On Sun Jan 18 2015 at 11:01:20 PM Kevin (Sangwoo) Kim <kevin...@apache.org>
wrote:

> Hi experts,
>
> I'm getting an ExceptionInInitializerError when using a class defined in
> the REPL. The code is something like this:
>
> case class TEST(a: String)
> sc.textFile("~~~").map(TEST(_)).count
>
> The code above worked fine until yesterday, but suddenly, for some reason,
> it fails with the error below. I've confirmed it still works in local mode.
>
> This problem has been giving me a headache all weekend. Any ideas?
>
> Environment:
> AWS EC2, S3
> Spark v1.1.1, Hadoop 2.2
>
> Attaching error logs:
> ===
>
> 15/01/18 13:54:22 INFO TaskSetManager: Lost task 0.19 in stage 0.0 (TID 19) on executor ip-172-16-186-181.ap-northeast-1.compute.internal: java.lang.ExceptionInInitializerError (null) [duplicate 5]
> 15/01/18 13:54:22 ERROR TaskSetManager: Task 0 in stage 0.0 failed 20 times; aborting job
> 15/01/18 13:54:22 INFO TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool
> 15/01/18 13:54:22 INFO TaskSchedulerImpl: Cancelling stage 0
> 15/01/18 13:54:22 INFO DAGScheduler: Failed to run first at <console>:45
> org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 20 times, most recent failure: Lost task 0.19 in stage 0.0 (TID 19, ip-172-16-186-181.ap-northeast-1.compute.internal): java.lang.ExceptionInInitializerError:
>         $iwC.<init>(<console>:6)
>         <init>(<console>:35)
>         .<init>(<console>:39)
>         .<clinit>(<console>)
>         $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$DailyStatsChartBuilder$.fromCsv(<console>:41)
>         $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:43)
>         $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:43)
>         scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
>         scala.collection.Iterator$$anon$10.next(Iterator.scala:312)
>         scala.collection.Iterator$class.foreach(Iterator.scala:727)
>         scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
>         scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
>         scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
>         scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
>         scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
>         scala.collection.AbstractIterator.to(Iterator.scala:1157)
>         scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
>         scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
>         scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
>         scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
>         org.apache.spark.rdd.RDD$$anonfun$28.apply(RDD.scala:1079)
>         org.apache.spark.rdd.RDD$$anonfun$28.apply(RDD.scala:1079)
>         org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1143)
>         org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1143)
>         org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:62)
>         org.apache.spark.scheduler.Task.run(Task.scala:54)
>         org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:178)
>         java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>         java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>         java.lang.Thread.run(Thread.java:745)
> Driver stacktrace:
> at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1185)
> at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1174)
> at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1173)
> at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
> at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
> at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1173)
> at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
> at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
> at scala.Option.foreach(Option.scala:236)
> at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:688)
> at org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1391)
> at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
> at akka.actor.ActorCell.invoke(ActorCell.scala:456)
> at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
> at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>
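
Given the symptoms in the quoted question (a case class defined at the REPL prompt that fails only on the cluster), the workaround usually suggested for this class of error is to define the class outside the shell, so that executors load a plain top-level class rather than the REPL's $lineNN wrappers. A sketch, with illustrative package and jar names:

    // TestModel.scala -- compile into a small jar and start the shell
    // with it on the classpath, e.g.: spark-shell --jars test-model.jar
    package com.example.model

    case class TEST(a: String)

and then in the shell, keeping the original (elided) input path:

    import com.example.model.TEST
    sc.textFile("~~~").map(TEST(_)).count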
