Hi, when I run a query in Spark SQL, it gives me the following error. What are the possible reasons that could cause this problem?

java.io.EOFException
        at 
org.apache.spark.serializer.KryoDeserializationStream.readObject(KryoSerializer.scala:148)
        at 
org.apache.spark.sql.hbase.HBasePartitioner$$anonfun$readObject$1.apply(HBasePartitioner.scala:100)
        at 
org.apache.spark.sql.hbase.HBasePartitioner$$anonfun$readObject$1.apply(HBasePartitioner.scala:99)
        at 
org.apache.spark.util.Utils$.deserializeViaNestedStream(Utils.scala:126)
        at 
org.apache.spark.sql.hbase.HBasePartitioner.readObject(HBasePartitioner.scala:99)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:483)
        at 
java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
        at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1896)
        at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
        at 
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993)
        at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918)
        at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
        at 
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993)
        at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918)
        at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
        at java.io.ObjectInputStream.readObject(ObjectInputStream.java:371)
        at 
org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
        at 
org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
        at 
org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:60)
        at 
org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41)
        at org.apache.spark.scheduler.Task.run(Task.scala:56)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:183)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
15/01/10 01:50:25 ERROR Executor: Exception in task 1.0 in stage 0.0 (TID 1)
java.io.EOFException
        at 
org.apache.spark.serializer.KryoDeserializationStream.readObject(KryoSerializer.scala:148)
        at 
org.apache.spark.sql.hbase.HBasePartitioner$$anonfun$readObject$1.apply(HBasePartitioner.scala:100)
        at 
org.apache.spark.sql.hbase.HBasePartitioner$$anonfun$readObject$1.apply(HBasePartitioner.scala:99)
        at 
org.apache.spark.util.Utils$.deserializeViaNestedStream(Utils.scala:126)
        at 
org.apache.spark.sql.hbase.HBasePartitioner.readObject(HBasePartitioner.scala:99)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:483)
        at 
java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
        at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1896)
        at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
        at 
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993)
        at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918)
        at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
        at 
java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1993)
        at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1918)
        at 
java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
        at java.io.ObjectInputStream.readObject(ObjectInputStream.java:371)
        at 
org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
        at 
org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
        at 
org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:60)
        at 
org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41)
        at org.apache.spark.scheduler.Task.run(Task.scala:56)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:183)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
15/01/10 01:50:25 INFO CoarseGrainedExecutorBackend: Got assigned task 2
15/01/10 01:50:25 INFO Executor: Running task 1.1 in stage 0.0 (TID 2)
15/01/10 01:50:25 INFO CoarseGrainedExecutorBackend: Got assigned task 3
15/01/10 01:50:25 INFO Executor: Running task 0.1 in stage 0.0 (TID 3)
15/01/10 01:50:25 INFO BlockManager: Found block broadcast_1 locally
15/01/10 01:50:25 INFO BlockManager: Found block broadcast_1 locally
15/01/10 01:50:25 INFO BlockManager: Found block broadcast_0 locally
15/01/10 01:50:25 INFO BlockManager: Found block broadcast_0 locally
15/01/10 01:50:25 ERROR Executor: Exception in task 0.1 in stage 0.0 (TID 3)




---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to