Hi Team, I'm getting the exception below. Could you please help me resolve this issue?
Below is my piece of code val rdd = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat], classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable], classOf[org.apache.hadoop.hbase.client.Result]) var s =rdd.map(x => x._2) var a = test(s.collect) def test(s:Array[org.apache.hadoop.hbase.client.Result]) { var invRecords = HashMap[String, HashMap[String, String]]() s foreach (x => { var invValues = HashMap[String, String]() x rawCells () foreach (y => { invValues += Bytes.toString(y getQualifier) -> Bytes.toString(y getValue) }) invRecords += Bytes.toString(x getRow) -> invValues }) println("********************* === "+invRecords.size) } *java.io.NotSerializableException: org.apache.hadoop.hbase.client.Result* at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1183) at java.io.ObjectOutputStream.writeArray(ObjectOutputStream.java:1377) at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1173) at java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:347) at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:42) at org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:71) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:197) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1146) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) at java.lang.Thread.run(Thread.java:701) 2014-09-24 20:57:29,703 WARN [Result resolver thread-0] scheduler.TaskSetManager (Logging.scala:logWarning(70)) - Lost TID 0 (task 0.0:0) 2014-09-24 20:57:29,717 ERROR [Result resolver thread-0] scheduler.TaskSetManager (Logging.scala:logError(74)) - Task 0.0:0 had a not serializable result: java.io.NotSerializableException: org.apache.hadoop.hbase.client.Result; not retrying 2014-09-24 20:57:29,722 INFO [main] scheduler.DAGScheduler (Logging.scala:logInfo(58)) - Failed to run collect at HBaseTest5.scala:26 *Exception in thread 
"main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 0.0:0 had a not serializable result: java.io.NotSerializableException: org.apache.hadoop.hbase.client.Result* at org.apache.spark.scheduler.DAGScheduler.org $apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1033) at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1017) at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1015) at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47) at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1015) at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:633) at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:633) at scala.Option.foreach(Option.scala:236) at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:633) at org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1207) at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498) at akka.actor.ActorCell.invoke(ActorCell.scala:456) at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237) at akka.dispatch.Mailbox.run(Mailbox.scala:219) at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386) at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)