Ravi Ramadoss created ZEPPELIN-1205:
---------------------------------------

             Summary: Zeppelin <= 0.7 uses Guava version 14.0.1, which conflicts 
with the spark-cassandra-connector
                 Key: ZEPPELIN-1205
                 URL: https://issues.apache.org/jira/browse/ZEPPELIN-1205
             Project: Zeppelin
          Issue Type: Bug
          Components: build, zeppelin-interpreter
    Affects Versions: 0.6.0, 0.7.0
         Environment: Mac OS X El Capitan
            Reporter: Ravi Ramadoss


The Spark interpreter uses Guava version 14.0.1, which is not compatible with 
spark-cassandra-connector:1.5.1-s_2.10 (with Spark 1.6, Hadoop 2.6, and Cassandra 
3.7). The issue is resolved once the Guava version is changed from 14.0.1 to 
16.0.1 in zeppelin/spark/pom.xml.

*Built using*
-------------
mvn clean package -DskipTests -Pcassandra-spark-1.5 -Phadoop-2.6

*Config*
--------
zeppelin-env.sh
export SPARK_HOME="/Users/raviramadoss/Downloads/spark-1.6.0-bin-without-hadoop"
# export SPARK_SUBMIT_OPTIONS                   # (optional) extra options to 
pass to spark submit. eg) "--driver-memory 512M --executor-memory 1G".
export SPARK_SUBMIT_OPTIONS="--packages 
datastax:spark-cassandra-connector:1.5.1-s_2.10"


*code*
-------
%spark
sc.stop 
import com.datastax.spark.connector._, org.apache.spark.SparkContext, 
org.apache.spark.SparkContext._, org.apache.spark.SparkConf
val conf = new SparkConf(true).set("spark.cassandra.connection.host", 
"localhost")
val sc = new SparkContext(conf)
val test_spark_rdd = sc.cassandraTable("dev", "emp")
test_spark_rdd.first

*Error*
------
import com.datastax.spark.connector._
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
conf: org.apache.spark.SparkConf = org.apache.spark.SparkConf@531a41eb
sc: org.apache.spark.SparkContext = org.apache.spark.SparkContext@47e98241
test_spark_rdd: 
com.datastax.spark.connector.rdd.CassandraTableScanRDD[com.datastax.spark.connector.CassandraRow]
 = CassandraTableScanRDD[0] at RDD at CassandraRDD.scala:15
java.lang.IllegalStateException: Detected Guava issue #1635 which indicates 
that a version of Guava less than 16.01 is in use.  This introduces codec 
resolution issues and potentially other incompatibility issues in the driver.  
Please upgrade to Guava 16.01 or later.
        at 
com.datastax.driver.core.SanityChecks.checkGuava(SanityChecks.java:62)
        at com.datastax.driver.core.SanityChecks.check(SanityChecks.java:36)
        at com.datastax.driver.core.Cluster.<clinit>(Cluster.java:67)
        at 
com.datastax.spark.connector.cql.DefaultConnectionFactory$.clusterBuilder(CassandraConnectionFactory.scala:35)
        at 
com.datastax.spark.connector.cql.DefaultConnectionFactory$.createCluster(CassandraConnectionFactory.scala:87)
        at 
com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala:153)
        at 
com.datastax.spark.connector.cql.CassandraConnector$$anonfun$2.apply(CassandraConnector.scala:148)
        at 
com.datastax.spark.connector.cql.CassandraConnector$$anonfun$2.apply(CassandraConnector.scala:148)
        at 
com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:31)
        at 
com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:56)
        at 
com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:81)
        at 
com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:109)
        at 
com.datastax.spark.connector.cql.CassandraConnector.withClusterDo(CassandraConnector.scala:120)
        at 
com.datastax.spark.connector.cql.Schema$.fromCassandra(Schema.scala:254)
        at 
com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.tableDef(CassandraTableRowReaderProvider.scala:51)
        at 
com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef$lzycompute(CassandraTableScanRDD.scala:59)
        at 
com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef(CassandraTableScanRDD.scala:59)
        at 
com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.verify(CassandraTableRowReaderProvider.scala:146)
        at 
com.datastax.spark.connector.rdd.CassandraTableScanRDD.verify(CassandraTableScanRDD.scala:59)
        at 
com.datastax.spark.connector.rdd.CassandraTableScanRDD.getPartitions(CassandraTableScanRDD.scala:143)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
        at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
        at scala.Option.getOrElse(Option.scala:120)
        at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
        at org.apache.spark.rdd.RDD$$anonfun$take$1.apply(RDD.scala:1293)
        at 
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
        at 
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
        at org.apache.spark.rdd.RDD.withScope(RDD.scala:316)
        at org.apache.spark.rdd.RDD.take(RDD.scala:1288)
        at 
com.datastax.spark.connector.rdd.CassandraRDD.take(CassandraRDD.scala:121)
        at 
com.datastax.spark.connector.rdd.CassandraRDD.take(CassandraRDD.scala:122)
        at org.apache.spark.rdd.RDD$$anonfun$first$1.apply(RDD.scala:1328)
        at 
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
        at 
org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
        at org.apache.spark.rdd.RDD.withScope(RDD.scala:316)
        at org.apache.spark.rdd.RDD.first(RDD.scala:1327)
        at 
$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:42)
        at 
$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:47)
        at 
$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:49)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:51)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:53)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:55)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:57)
        at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:59)
        at $iwC$$iwC$$iwC$$iwC.<init>(<console>:61)
        at $iwC$$iwC$$iwC.<init>(<console>:63)
        at $iwC$$iwC.<init>(<console>:65)
        at $iwC.<init>(<console>:67)
        at <init>(<console>:69)
        at .<init>(<console>:73)
        at .<clinit>(<console>)
        at .<init>(<console>:7)
        at .<clinit>(<console>)
        at $print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:497)
        at 
org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
        at 
org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)
        at 
org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:497)
        at org.apache.zeppelin.spark.Utils.invokeMethod(Utils.java:38)
        at 
org.apache.zeppelin.spark.SparkInterpreter.interpret(SparkInterpreter.java:704)
        at 
org.apache.zeppelin.spark.SparkInterpreter.interpretInput(SparkInterpreter.java:912)
        at 
org.apache.zeppelin.spark.SparkInterpreter.interpret(SparkInterpreter.java:858)
        at 
org.apache.zeppelin.spark.SparkInterpreter.interpret(SparkInterpreter.java:851)
        at 
org.apache.zeppelin.interpreter.LazyOpenInterpreter.interpret(LazyOpenInterpreter.java:94)
        at 
org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:383)
        at org.apache.zeppelin.scheduler.Job.run(Job.java:176)
        at 
org.apache.zeppelin.scheduler.FIFOScheduler$1.run(FIFOScheduler.java:139)
        at 
java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at 
java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180)
        at 
java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

Reply via email to