I am getting the following error in the spark-shell when I call `df.show()`. Which jar file do I need to download to fix this error? The `java.lang.NoClassDefFoundError: spray/json/JsonReader` indicates the spray-json library is missing from the classpath — adding the `spray-json` jar (matching your Scala version, e.g. `spray-json_2.10` or `spray-json_2.11`) via `--jars` or `spark.driver.extraClassPath` should resolve it.
Error from df.show(): scala> val df = msc.sql(query) df: org.apache.spark.sql.DataFrame = [id: int, name: string] scala> df.show() java.lang.NoClassDefFoundError: spray/json/JsonReader at com.memsql.spark.pushdown.MemSQLPhysicalRDD$.fromAbstractQueryTree(MemSQLPhy sicalRDD.scala:95) at com.memsql.spark.pushdown.MemSQLPushdownStrategy.apply(MemSQLPushdownStrateg y.scala:49) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPl anner.scala:58) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPl anner.scala:58) at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371) at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala: 59) at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.s cala:54) at org.apache.spark.sql.execution.SparkStrategies$BasicOperators$.apply(SparkSt rategies.scala:374) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPl anner.scala:58) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPl anner.scala:58) at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371) at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala: 59) at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLConte xt.scala:926) at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:92 4) at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLCo ntext.scala:930) at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala :930) at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution .scala:53) at org.apache.spark.sql.DataFrame.withNewExecutionId(DataFrame.scala:1903) at org.apache.spark.sql.DataFrame.collect(DataFrame.scala:1384) at org.apache.spark.sql.DataFrame.head(DataFrame.scala:1314) at org.apache.spark.sql.DataFrame.take(DataFrame.scala:1377) at 
org.apache.spark.sql.DataFrame.showString(DataFrame.scala:178) at org.apache.spark.sql.DataFrame.show(DataFrame.scala:401) at org.apache.spark.sql.DataFrame.show(DataFrame.scala:362) at org.apache.spark.sql.DataFrame.show(DataFrame.scala:370) at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$ iwC$$iwC$$iwC.<init>(<console>:48) at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$ iwC$$iwC.<init>(<console>:53) at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$ iwC.<init>(<console>:55) at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.< init>(<console>:57) at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init> (<console>:59) at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<con sole>:61)