bq. Required field 'X' is unset! Struct:Y Can you check your class Y and fix the unset required field reported above?
Cheers On Thu, Oct 23, 2014 at 3:55 PM, ankits <ankitso...@gmail.com> wrote: > I am simply catching all exceptions (like case e:Throwable => > println("caught: "+e) ) > > Here is the stack trace: > > 2014-10-23 15:51:10,766 ERROR [] Exception in task 1.0 in stage 1.0 (TID > 1) > java.io.IOException: org.apache.thrift.protocol.TProtocolException: > Required > field 'X' is unset! Struct:Y(id:, ts:1409094360004, type:NON, > response_time:2, now:1409094360, env_type:PROD,....) > at com.A.thrift.Y.writeObject(Y.java:8489) > at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) > at > > sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) > at > > sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) > at java.lang.reflect.Method.invoke(Method.java:606) > at > java.io.ObjectStreamClass.invokeWriteObject(ObjectStreamClass.java:988) > at > java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1495) > at > > java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1431) > at > java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1177) > at > java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1547) > at > java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1508) > at > > java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1431) > at > java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1177) > at > java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:347) > at > > org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:42) > at > > org.apache.spark.storage.DiskBlockObjectWriter.write(BlockObjectWriter.scala:195) > at > > org.apache.spark.shuffle.hash.HashShuffleWriter$$anonfun$write$1.apply(HashShuffleWriter.scala:67) > at > > org.apache.spark.shuffle.hash.HashShuffleWriter$$anonfun$write$1.apply(HashShuffleWriter.scala:65) > at 
scala.collection.Iterator$class.foreach(Iterator.scala:727) > at scala.collection.AbstractIterator.foreach(Iterator.scala:1157) > at > > org.apache.spark.shuffle.hash.HashShuffleWriter.write(HashShuffleWriter.scala:65) > at > org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:68) > at > org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41) > at org.apache.spark.scheduler.Task.run(Task.scala:54) > at > org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:177) > at > > java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) > at > > java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) > at java.lang.Thread.run(Thread.java:745) > Caused by: org.apache.thrift.protocol.TProtocolException: Required field > 'X' > is unset! Struct:Y(id:, ts:1409094360004, type:NON, response_time:2, > now:1409094360, env_type:PROD, ...) > at com.A.thrift.Y.validate(Y:8428) > at com.A.thrift.Y$YStandardScheme.write(Y.java:9359) > at > com.A.thrift.Y$FlatAdserverEventStandardScheme.write(Y.java:8509) > at com.A.thrift.Y.write(Y.java:7646) > at com.A.thrift.Y.writeObject(Y.java:8487) > ... 27 more > 2014-10-23 15:51:10,766 11234 ERROR [] Exception in task 0.0 in stage 1.0 > (TID 0) > java.io.IOException: org.apache.thrift.protocol.TProtocolException: > Required > field 'X' is unset! Struct:Y(id:, ts:1409094360004, type:NON, > response_time:2, now:1409094360, ...) 
> at com.A.thrift.YwriteObject(Y.java:8489) > at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) > at > > sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) > at > > sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) > at java.lang.reflect.Method.invoke(Method.java:606) > at > java.io.ObjectStreamClass.invokeWriteObject(ObjectStreamClass.java:988) > at > java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1495) > at > > java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1431) > at > java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1177) > at > java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1547) > at > java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1508) > at > > java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1431) > at > java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1177) > at > java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:347) > at > > org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:42) > at > > org.apache.spark.storage.DiskBlockObjectWriter.write(BlockObjectWriter.scala:195) > at > > org.apache.spark.shuffle.hash.HashShuffleWriter$$anonfun$write$1.apply(HashShuffleWriter.scala:67) > at > > org.apache.spark.shuffle.hash.HashShuffleWriter$$anonfun$write$1.apply(HashShuffleWriter.scala:65) > at scala.collection.Iterator$class.foreach(Iterator.scala:727) > at scala.collection.AbstractIterator.foreach(Iterator.scala:1157) > at > > org.apache.spark.shuffle.hash.HashShuffleWriter.write(HashShuffleWriter.scala:65) > at > org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:68) > at > org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41) > at org.apache.spark.scheduler.Task.run(Task.scala:54) > at > org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:177) > at 
> > java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) > at > > java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) > at java.lang.Thread.run(Thread.java:745) > 2014-10-23 15:51:10,791 ERROR [] Task 0 in stage 1.0 failed 1 times; > aborting job > > > > -- > View this message in context: > http://apache-spark-user-list.1001560.n3.nabble.com/Exceptions-not-caught-tp17157p17159.html > Sent from the Apache Spark User List mailing list archive at Nabble.com. > > --------------------------------------------------------------------- > To unsubscribe, e-mail: user-unsubscr...@spark.apache.org > For additional commands, e-mail: user-h...@spark.apache.org > >