at akka.dispatch.Mailbox.run(Mailbox.scala:219)
> at
> akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
.hadoopConfiguration)
ssc.start()
ssc.awaitTermination()
} catch {
case e: Exception => println("exception caught: " + e);
}
}
--
View this message in context:
http://apache-spark-user-list.1001560.n3.nabble.com/spark-streaming-saving-kafka-DStream-into-hadoop-throws-exception-tp12202.html
tate ==
"COMPLETE").countByWindow(Minutes(15), Seconds(1))
count.map(cnt => "the Total count of calls in complete state in the
last 15 minutes is: " + cnt).print()
ssc.start()
ssc.awaitTermination()
} catch {
case e: Exception => println("ex
ecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
> at
> scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> at
> scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> at
> scala.concurrent.fork
d.java:107)
--
View this message in context:
http://apache-spark-user-list.1001560.n3.nabble.com/spark-streaming-saving-kafka-DStream-into-hadoop-throws-exception-tp12202.html
Sent from the Apache Spark User List mailing list archive at