Just realized that I was missing the JavaSparkContext import; after adding it, the error is:
Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task not serializable: java.io.NotSerializableException: java.lang.reflect.Method at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1044) at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1028) at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1026) at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47) at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1026) at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$submitMissingTasks(DAGScheduler.scala:771) at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$submitStage(DAGScheduler.scala:714) at org.apache.spark.scheduler.DAGScheduler$$anonfun$org$apache$spark$scheduler$DAGScheduler$$submitStage$4.apply(DAGScheduler.scala:718) at org.apache.spark.scheduler.DAGScheduler$$anonfun$org$apache$spark$scheduler$DAGScheduler$$submitStage$4.apply(DAGScheduler.scala:717) at scala.collection.immutable.List.foreach(List.scala:318) at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$submitStage(DAGScheduler.scala:717) at org.apache.spark.scheduler.DAGScheduler$$anonfun$org$apache$spark$scheduler$DAGScheduler$$submitStage$4.apply(DAGScheduler.scala:718) at org.apache.spark.scheduler.DAGScheduler$$anonfun$org$apache$spark$scheduler$DAGScheduler$$submitStage$4.apply(DAGScheduler.scala:717) at scala.collection.immutable.List.foreach(List.scala:318) at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$submitStage(DAGScheduler.scala:717) at org.apache.spark.scheduler.DAGScheduler.handleJobSubmitted(DAGScheduler.scala:698) at 
org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1198) at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498) at akka.actor.ActorCell.invoke(ActorCell.scala:456) at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237) at akka.dispatch.Mailbox.run(Mailbox.scala:219) at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) Alexis On Jul 29, 2014, at 2:53 PM, Alexis Roos <alexis.r...@gmail.com> wrote: > Hello, > > I am porting a data process running in Spark from Scala to Java (8) using > Lambdas to see how practical Java 8 is. > > The first few steps are working (parsing data, creating JavaRDDs) but then it > fails while doing a cogroup between two JavaPairRDD<String, String>. > > I am getting a bunch of java.io.StreamCorruptedException: invalid type code: > 00 and ultimately the stack trace below. > > It is running on Mac OS X local mode with Java SE 8. > java version "1.8.0_11" > Java(TM) SE Runtime Environment (build 1.8.0_11-b12) > Java HotSpot(TM) 64-Bit Server VM (build 25.11-b03, mixed mode) > > Any ideas on possible root cause ?? 
> > Thanks, > > Alexis > > — > > Exception in thread "main" org.apache.spark.SparkException: Job aborted due > to stage failure: Task 3.0:0 failed 1 times, most recent failure: Exception > failure in TID 0 on host localhost: java.io.StreamCorruptedException: invalid > type code: 00 > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1379) > java.io.ObjectInputStream.skipCustomData(ObjectInputStream.java:1959) > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1921) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993) > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993) > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993) > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993) > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > java.io.ObjectInputStream.readArray(ObjectInputStream.java:1707) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1345) > > 
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993) > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993) > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993) > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993) > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > java.io.ObjectInputStream.readObject(ObjectInputStream.java:371) > scala.collection.immutable.$colon$colon.readObject(List.scala:362) > sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) > > sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) > > sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) > java.lang.reflect.Method.invoke(Method.java:483) > > java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017) > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1896) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993) > 
java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993) > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > java.io.ObjectInputStream.readObject(ObjectInputStream.java:371) > scala.collection.immutable.$colon$colon.readObject(List.scala:362) > sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) > > sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) > > sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) > java.lang.reflect.Method.invoke(Method.java:483) > > java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017) > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1896) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > > java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993) > java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918) > > java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > java.io.ObjectInputStream.readObject(ObjectInputStream.java:371) > > org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63) > > org.apache.spark.scheduler.ShuffleMapTask$.deserializeInfo(ShuffleMapTask.scala:63) > > org.apache.spark.scheduler.ShuffleMapTask.readExternal(ShuffleMapTask.scala:135) > > java.io.ObjectInputStream.readExternalData(ObjectInputStream.java:1840) > > 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1799) > java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351) > java.io.ObjectInputStream.readObject(ObjectInputStream.java:371) > > org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63) > > org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:85) > org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:165) > > java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) > > java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) > java.lang.Thread.run(Thread.java:745) > Driver stacktrace: > at > org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1044) > at > org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1028) > at > org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1026) > at > scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) > at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47) > at > org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1026) > at > org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:634) > at > org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:634) > at scala.Option.foreach(Option.scala:236) > at > org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:634) > at > org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1229) > at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498) > at akka.actor.ActorCell.invoke(ActorCell.scala:456) > at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237) > at akka.dispatch.Mailbox.run(Mailbox.scala:219) > at > 
akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) > at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) > at > scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) > at > scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) > at > scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) > > >