[ https://issues.apache.org/jira/browse/SPARK-2018?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14018409#comment-14018409 ]

Yanjie Gao commented on SPARK-2018:
-----------------------------------

Thanks for your quick reply!
I believe they use the same JVM.

Do you think this could have another cause?

How can I debug this to find the reason? (A small probe for checking this is sketched after the ps output below.)

Best regards,
Yanjie Gao

Here is the ps -aux | grep java output:

 test1      349  0.5  3.7 2945280 195456 pts/7  Sl   02:30   0:22 
/opt/ibm/java-ppc64-70//bin/java -cp 
/home/test1/spark-1.0.0-bin-hadoop2/lib::/home/test1/src/spark-1.0.0-bin-hadoop2/conf:/home/test1/src/spark-1.0.0-bin-hadoop2/lib/spark-assembly-1.0.0-hadoop2.2.0.jar:/home/test1/src/spark-1.0.0-bin-hadoop2/lib/datanucleus-rdbms-3.2.1.jar:/home/test1/src/spark-1.0.0-bin-hadoop2/lib/datanucleus-api-jdo-3.2.1.jar:/home/test1/src/spark-1.0.0-bin-hadoop2/lib/datanucleus-core-3.2.2.jar:/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop/:/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop/
 -XX:MaxPermSize=128m -Dspark.akka.logLifecycleEvents=true -Xms512m -Xmx512m 
org.apache.spark.deploy.master.Master --ip 9.114.34.69 --port 7077 --webui-port 
8080
test1      492  0.4  3.7 2946496 194432 ?      Sl   02:30   0:19 
/opt/ibm/java-ppc64-70//bin/java -cp 
/home/test1/spark-1.0.0-bin-hadoop2/lib::/home/test1/src/spark-1.0.0-bin-hadoop2/conf:/home/test1/src/spark-1.0.0-bin-hadoop2/lib/spark-assembly-1.0.0-hadoop2.2.0.jar:/home/test1/src/spark-1.0.0-bin-hadoop2/lib/datanucleus-rdbms-3.2.1.jar:/home/test1/src/spark-1.0.0-bin-hadoop2/lib/datanucleus-api-jdo-3.2.1.jar:/home/test1/src/spark-1.0.0-bin-hadoop2/lib/datanucleus-core-3.2.2.jar:/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop/:/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop/
 -XX:MaxPermSize=128m -Dspark.akka.logLifecycleEvents=true -Xms512m -Xmx512m 
org.apache.spark.deploy.worker.Worker spark://9.114.34.69:7077
test1     3160  0.0  0.0 104832  2816 pts/10   S+   03:40   0:00 grep java
test1    13163  0.1  2.7 1631232 144256 ?      Sl   Jun02   2:00 
/opt/ibm/java-ppc64-70/bin/java -Dproc_namenode -Xmx1000m 
-Djava.net.preferIPv4Stack=true 
-Dhadoop.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dhadoop.log.file=hadoop.log 
-Dhadoop.home.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0 -Dhadoop.id.str=test1 
-Dhadoop.root.logger=INFO,console 
-Djava.library.path=/home/test1/src/hadoop-2.3.0-cdh5.0.0/lib/native 
-Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true 
-Djava.net.preferIPv4Stack=true -Djava.net.preferIPv4Stack=true 
-Dhadoop.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dhadoop.log.file=hadoop-test1-namenode-p7hvs7br16.log 
-Dhadoop.home.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0 -Dhadoop.id.str=test1 
-Dhadoop.root.logger=INFO,RFA 
-Djava.library.path=/home/test1/src/hadoop-2.3.0-cdh5.0.0/lib/native 
-Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true 
-Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender 
-Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender 
-Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender 
-Dhadoop.security.logger=INFO,RFAS 
org.apache.hadoop.hdfs.server.namenode.NameNode
test1    13328  0.0  2.1 1636160 113152 ?      Sl   Jun02   1:39 
/opt/ibm/java-ppc64-70/bin/java -Dproc_datanode -Xmx1000m 
-Djava.net.preferIPv4Stack=true 
-Dhadoop.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dhadoop.log.file=hadoop.log 
-Dhadoop.home.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0 -Dhadoop.id.str=test1 
-Dhadoop.root.logger=INFO,console 
-Djava.library.path=/home/test1/src/hadoop-2.3.0-cdh5.0.0/lib/native 
-Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true 
-Djava.net.preferIPv4Stack=true -Djava.net.preferIPv4Stack=true 
-Dhadoop.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dhadoop.log.file=hadoop-test1-datanode-p7hvs7br16.log 
-Dhadoop.home.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0 -Dhadoop.id.str=test1 
-Dhadoop.root.logger=INFO,RFA 
-Djava.library.path=/home/test1/src/hadoop-2.3.0-cdh5.0.0/lib/native 
-Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true -server 
-Dhadoop.security.logger=ERROR,RFAS -Dhadoop.security.logger=ERROR,RFAS 
-Dhadoop.security.logger=ERROR,RFAS -Dhadoop.security.logger=INFO,RFAS 
org.apache.hadoop.hdfs.server.datanode.DataNode
test1    13474  0.0  2.1 1624960 113408 ?      Sl   Jun02   0:35 
/opt/ibm/java-ppc64-70/bin/java -Dproc_secondarynamenode -Xmx1000m 
-Djava.net.preferIPv4Stack=true 
-Dhadoop.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dhadoop.log.file=hadoop.log 
-Dhadoop.home.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0 -Dhadoop.id.str=test1 
-Dhadoop.root.logger=INFO,console 
-Djava.library.path=/home/test1/src/hadoop-2.3.0-cdh5.0.0/lib/native 
-Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true 
-Djava.net.preferIPv4Stack=true -Djava.net.preferIPv4Stack=true 
-Dhadoop.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dhadoop.log.file=hadoop-test1-secondarynamenode-p7hvs7br16.log 
-Dhadoop.home.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0 -Dhadoop.id.str=test1 
-Dhadoop.root.logger=INFO,RFA 
-Djava.library.path=/home/test1/src/hadoop-2.3.0-cdh5.0.0/lib/native 
-Dhadoop.policy.file=hadoop-policy.xml -Djava.net.preferIPv4Stack=true 
-Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender 
-Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender 
-Dhadoop.security.logger=INFO,RFAS -Dhdfs.audit.logger=INFO,NullAppender 
-Dhadoop.security.logger=INFO,RFAS 
org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode
test1    13702  0.3  2.4 1666112 124544 ?      Sl   Jun02   6:47 
/opt/ibm/java-ppc64-70/bin/java -Dproc_resourcemanager -Xmx1000m 
-Dhadoop.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dyarn.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dhadoop.log.file=yarn-test1-resourcemanager-p7hvs7br16.log 
-Dyarn.log.file=yarn-test1-resourcemanager-p7hvs7br16.log -Dyarn.home.dir= 
-Dyarn.id.str=test1 -Dhadoop.root.logger=INFO,RFA -Dyarn.root.logger=INFO,RFA 
-Djava.library.path=/home/test1/src/hadoop-2.3.0-cdh5.0.0/lib/native 
-Dyarn.policy.file=hadoop-policy.xml 
-Dhadoop.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dyarn.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dhadoop.log.file=yarn-test1-resourcemanager-p7hvs7br16.log 
-Dyarn.log.file=yarn-test1-resourcemanager-p7hvs7br16.log 
-Dyarn.home.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0 
-Dhadoop.home.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0 
-Dhadoop.root.logger=INFO,RFA -Dyarn.root.logger=INFO,RFA 
-Djava.library.path=/home/test1/src/hadoop-2.3.0-cdh5.0.0/lib/native -classpath 
/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop/:/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop/:/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop/:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/common/lib/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/common/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/hdfs:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/hdfs/lib/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/hdfs/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/yarn/lib/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/yarn/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/mapreduce/lib/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/mapreduce/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/contrib/capacity-scheduler/*.jar:/home/test1/src/hadoop-2.3.0-cdh5.0.0/contrib/capacity-scheduler/*.jar:/home/test1/src/hadoop-2.3.0-cdh5.0.0/contrib/capacity-scheduler/*.jar:/home/test1/src/hadoop-2.3.0-cdh5.0.0/contrib/capacity-scheduler/*.jar:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/yarn/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/yarn/lib/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop//rm-config/log4j.properties
 org.apache.hadoop.yarn.server.resourcemanager.ResourceManager
test1    13800  0.1  1.9 1633664 98560 ?       Sl   Jun02   3:03 
/opt/ibm/java-ppc64-70/bin/java -Dproc_nodemanager -Xmx1000m 
-Dhadoop.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dyarn.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dhadoop.log.file=yarn-test1-nodemanager-p7hvs7br16.log 
-Dyarn.log.file=yarn-test1-nodemanager-p7hvs7br16.log -Dyarn.home.dir= 
-Dyarn.id.str=test1 -Dhadoop.root.logger=INFO,RFA -Dyarn.root.logger=INFO,RFA 
-Djava.library.path=/home/test1/src/hadoop-2.3.0-cdh5.0.0/lib/native 
-Dyarn.policy.file=hadoop-policy.xml -server 
-Dhadoop.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dyarn.log.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0/logs 
-Dhadoop.log.file=yarn-test1-nodemanager-p7hvs7br16.log 
-Dyarn.log.file=yarn-test1-nodemanager-p7hvs7br16.log 
-Dyarn.home.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0 
-Dhadoop.home.dir=/home/test1/src/hadoop-2.3.0-cdh5.0.0 
-Dhadoop.root.logger=INFO,RFA -Dyarn.root.logger=INFO,RFA 
-Djava.library.path=/home/test1/src/hadoop-2.3.0-cdh5.0.0/lib/native -classpath 
/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop/:/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop/:/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop/:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/common/lib/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/common/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/hdfs:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/hdfs/lib/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/hdfs/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/yarn/lib/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/yarn/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/mapreduce/lib/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/mapreduce/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/contrib/capacity-scheduler/*.jar:/home/test1/src/hadoop-2.3.0-cdh5.0.0/contrib/capacity-scheduler/*.jar:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/yarn/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/share/hadoop/yarn/lib/*:/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop//nm-config/log4j.properties
 org.apache.hadoop.yarn.server.nodemanager.NodeManager
songdm   29650  0.4  5.1 3032704 264704 ?      Sl   May27  50:13 
/opt/ibm/java-ppc64-70//bin/java -cp 
::/home/songdm/spark/conf:/home/songdm/spark/assembly/target/scala-2.10/spark-assembly-1.0.0-SNAPSHOT-hadoop2.2.0.jar:/home/songdm/hadoop-2.2.0/etc/hadoop/:/home/songdm/hadoop-2.2.0/etc/hadoop/
 -XX:MaxPermSize=128m -Djava.library.path= -Xms512m -Xmx512m 
org.apache.spark.deploy.SparkSubmit --master local[*] --class 
org.apache.spark.examples.streaming.HdfsWordCount 
/home/songdm/spark/examples/target/scala-2.10/spark-examples-1.0.0-SNAPSHOT-hadoop2.2.0.jar
 ./kk
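
One way to narrow this down: run a small probe under each JVM that takes part in serialization (driver and executor), with the spark-assembly jar on the classpath, and compare the output. This is a minimal sketch of my own (the UidCheck class name is hypothetical); it uses only the standard java.io.ObjectStreamClass API:

import java.io.ObjectStreamClass;

public class UidCheck {
    public static void main(String[] args) throws Exception {
        // Report which JVM this probe is actually running under.
        System.out.println(System.getProperty("java.vm.vendor") + " "
                + System.getProperty("java.version") + " / "
                + System.getProperty("os.arch"));

        // The class named in the InvalidClassException; it must be on the
        // classpath, e.g. via the spark-assembly jar.
        Class<?> c = Class.forName("scala.reflect.ClassTag$$anon$1");

        // The serialVersionUID this JVM derives for that class. If the
        // serializing and deserializing sides print different values,
        // that matches the "local class incompatible" error exactly.
        ObjectStreamClass desc = ObjectStreamClass.lookup(c);
        System.out.println(desc == null
                ? "not serializable"
                : "serialVersionUID = " + desc.getSerialVersionUID());
    }
}

Running it once on the worker host, e.g. with /opt/ibm/java-ppc64-70/bin/java and the assembly jar on -cp, and once under the JVM that launches the driver, should show immediately whether the two sides agree on the UID.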

> Big-Endian (IBM Power7)  Spark Serialization issue
> --------------------------------------------------
>
>                 Key: SPARK-2018
>                 URL: https://issues.apache.org/jira/browse/SPARK-2018
>             Project: Spark
>          Issue Type: Bug
>    Affects Versions: 1.0.0
>         Environment: Hardware: IBM Power7
> OS: Linux version 2.6.32-358.el6.ppc64 
> ([email protected]) (gcc version 4.4.7 20120313 (Red 
> Hat 4.4.7-3) (GCC) ) #1 SMP Tue Jan 29 11:43:27 EST 2013
> JDK: Java(TM) SE Runtime Environment (build pxp6470sr5-20130619_01(SR5))
> IBM J9 VM (build 2.6, JRE 1.7.0 Linux ppc64-64 Compressed References 
> 20130617_152572 (JIT enabled, AOT enabled)
> Hadoop: Hadoop-2.3.0-CDH5.0.0
> Spark: Spark-1.0.0 or Spark-0.9.1
> spark-env.sh:
> export JAVA_HOME=/opt/ibm/java-ppc64-70/
> export SPARK_MASTER_IP=9.114.34.69
> export SPARK_WORKER_MEMORY=10000m
> export SPARK_CLASSPATH=/home/test1/spark-1.0.0-bin-hadoop2/lib
> export STANDALONE_SPARK_MASTER_HOST=9.114.34.69
> #export SPARK_JAVA_OPTS=' -Xdebug 
> -Xrunjdwp:transport=dt_socket,address=99999,server=y,suspend=n '
>            Reporter: Yanjie Gao
>
> We have an application running on Spark on a Power7 system, but we have 
> hit an important serialization issue.
> The bundled HdfsWordCount example reproduces the problem:
> ./bin/run-example org.apache.spark.examples.streaming.HdfsWordCount 
> localdir
> We used Power7 (a Big-Endian architecture) and Red Hat 6.4.
> Big-Endian appears to be the main cause, since the same example ran 
> successfully on another Power-based Little-Endian setup.
> Here is the exception stack and log:
> Spark Executor Command: "/opt/ibm/java-ppc64-70//bin/java" "-cp" 
> "/home/test1/spark-1.0.0-bin-hadoop2/lib::/home/test1/src/spark-1.0.0-bin-hadoop2/conf:/home/test1/src/spark-1.0.0-bin-hadoop2/lib/spark-assembly-1.0.0-hadoop2.2.0.jar:/home/test1/src/spark-1.0.0-bin-hadoop2/lib/datanucleus-rdbms-3.2.1.jar:/home/test1/src/spark-1.0.0-bin-hadoop2/lib/datanucleus-api-jdo-3.2.1.jar:/home/test1/src/spark-1.0.0-bin-hadoop2/lib/datanucleus-core-3.2.2.jar:/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop/:/home/test1/src/hadoop-2.3.0-cdh5.0.0/etc/hadoop/"
>  "-XX:MaxPermSize=128m"  "-Xdebug" 
> "-Xrunjdwp:transport=dt_socket,address=99999,server=y,suspend=n" "-Xms512M" 
> "-Xmx512M" "org.apache.spark.executor.CoarseGrainedExecutorBackend" 
> "akka.tcp://[email protected]:60253/user/CoarseGrainedScheduler" "2" 
> "p7hvs7br16" "4" "akka.tcp://sparkWorker@p7hvs7br16:59240/user/Worker" 
> "app-20140604023054-0000"
> ========================================
> 14/06/04 02:31:20 WARN util.NativeCodeLoader: Unable to load native-hadoop 
> library for your platform... using builtin-java classes where applicable
> 14/06/04 02:31:21 INFO spark.SecurityManager: Changing view acls to: 
> test1,yifeng
> 14/06/04 02:31:21 INFO spark.SecurityManager: SecurityManager: authentication 
> disabled; ui acls disabled; users with view permissions: Set(test1, yifeng)
> 14/06/04 02:31:22 INFO slf4j.Slf4jLogger: Slf4jLogger started
> 14/06/04 02:31:22 INFO Remoting: Starting remoting
> 14/06/04 02:31:22 INFO Remoting: Remoting started; listening on addresses 
> :[akka.tcp://sparkExecutor@p7hvs7br16:39658]
> 14/06/04 02:31:22 INFO Remoting: Remoting now listens on addresses: 
> [akka.tcp://sparkExecutor@p7hvs7br16:39658]
> 14/06/04 02:31:22 INFO executor.CoarseGrainedExecutorBackend: Connecting to 
> driver: akka.tcp://[email protected]:60253/user/CoarseGrainedScheduler
> 14/06/04 02:31:22 INFO worker.WorkerWatcher: Connecting to worker 
> akka.tcp://sparkWorker@p7hvs7br16:59240/user/Worker
> 14/06/04 02:31:23 INFO worker.WorkerWatcher: Successfully connected to 
> akka.tcp://sparkWorker@p7hvs7br16:59240/user/Worker
> 14/06/04 02:31:24 INFO executor.CoarseGrainedExecutorBackend: Successfully 
> registered with driver
> 14/06/04 02:31:24 INFO spark.SecurityManager: Changing view acls to: 
> test1,yifeng
> 14/06/04 02:31:24 INFO spark.SecurityManager: SecurityManager: authentication 
> disabled; ui acls disabled; users with view permissions: Set(test1, yifeng)
> 14/06/04 02:31:24 INFO slf4j.Slf4jLogger: Slf4jLogger started
> 14/06/04 02:31:24 INFO Remoting: Starting remoting
> 14/06/04 02:31:24 INFO Remoting: Remoting started; listening on addresses 
> :[akka.tcp://spark@p7hvs7br16:58990]
> 14/06/04 02:31:24 INFO Remoting: Remoting now listens on addresses: 
> [akka.tcp://spark@p7hvs7br16:58990]
> 14/06/04 02:31:24 INFO spark.SparkEnv: Connecting to MapOutputTracker: 
> akka.tcp://[email protected]:60253/user/MapOutputTracker
> 14/06/04 02:31:25 INFO spark.SparkEnv: Connecting to BlockManagerMaster: 
> akka.tcp://[email protected]:60253/user/BlockManagerMaster
> 14/06/04 02:31:25 INFO storage.DiskBlockManager: Created local directory at 
> /tmp/spark-local-20140604023125-3f61
> 14/06/04 02:31:25 INFO storage.MemoryStore: MemoryStore started with capacity 
> 307.2 MB.
> 14/06/04 02:31:25 INFO network.ConnectionManager: Bound socket to port 39041 
> with id = ConnectionManagerId(p7hvs7br16,39041)
> 14/06/04 02:31:25 INFO storage.BlockManagerMaster: Trying to register 
> BlockManager
> 14/06/04 02:31:25 INFO storage.BlockManagerMaster: Registered BlockManager
> 14/06/04 02:31:25 INFO spark.HttpFileServer: HTTP File server directory is 
> /tmp/spark-7bce4e43-2833-4666-93af-bd97c327497b
> 14/06/04 02:31:25 INFO spark.HttpServer: Starting HTTP Server
> 14/06/04 02:31:25 INFO server.Server: jetty-8.y.z-SNAPSHOT
> 14/06/04 02:31:26 INFO server.AbstractConnector: Started 
> [email protected]:39958
> 14/06/04 02:31:26 INFO executor.CoarseGrainedExecutorBackend: Got assigned 
> task 2
> 14/06/04 02:31:26 INFO executor.Executor: Running task ID 2
> 14/06/04 02:31:26 ERROR executor.Executor: Exception in task ID 2
> java.io.InvalidClassException: scala.reflect.ClassTag$$anon$1; local class 
> incompatible: stream classdesc serialVersionUID = -8102093212602380348, local 
> class serialVersionUID = -4937928798201944954
>         at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:678)
>         at 
> java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1678)
>         at 
> java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1573)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1827)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at 
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2047)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1971)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at 
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2047)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1971)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at java.io.ObjectInputStream.readObject(ObjectInputStream.java:409)
>         at scala.collection.immutable.$colon$colon.readObject(List.scala:362)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:76)
>         at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:607)
>         at 
> java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1078)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1949)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at 
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2047)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1971)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at java.io.ObjectInputStream.readObject(ObjectInputStream.java:409)
>         at scala.collection.immutable.$colon$colon.readObject(List.scala:362)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:76)
>         at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:607)
>         at 
> java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1078)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1949)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at 
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2047)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1971)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at 
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2047)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1971)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at java.io.ObjectInputStream.readObject(ObjectInputStream.java:409)
>         at scala.collection.immutable.$colon$colon.readObject(List.scala:362)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:76)
>         at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:607)
>         at 
> java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1078)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1949)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at 
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2047)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1971)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at 
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2047)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1971)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at java.io.ObjectInputStream.readObject(ObjectInputStream.java:409)
>         at scala.collection.immutable.$colon$colon.readObject(List.scala:362)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:76)
>         at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:607)
>         at 
> java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1078)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1949)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at 
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2047)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1971)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at 
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2047)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1971)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at java.io.ObjectInputStream.readObject(ObjectInputStream.java:409)
>         at scala.collection.immutable.$colon$colon.readObject(List.scala:362)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:76)
>         at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:607)
>         at 
> java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1078)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1949)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at 
> java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2047)
>         at 
> java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1971)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1854)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at java.io.ObjectInputStream.readObject(ObjectInputStream.java:409)
>         at 
> org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63)
>         at 
> org.apache.spark.scheduler.ResultTask$.deserializeInfo(ResultTask.scala:61)
>         at 
> org.apache.spark.scheduler.ResultTask.readExternal(ResultTask.scala:141)
>         at 
> java.io.ObjectInputStream.readExternalData(ObjectInputStream.java:1893)
>         at 
> java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1852)
>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1406)
>         at java.io.ObjectInputStream.readObject(ObjectInputStream.java:409)
>         at 
> org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:63)
>         at 
> org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:85)
>         at 
> org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:169)
>         at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>         at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>         at java.lang.Thread.run(Thread.java:781)
> 14/06/04 02:31:26 ERROR executor.CoarseGrainedExecutorBackend: Driver 
> Disassociated [akka.tcp://sparkExecutor@p7hvs7br16:39658] -> 
> [akka.tcp://[email protected]:60253] disassociated! Shutting down.



--
This message was sent by Atlassian JIRA
(v6.2#6252)