[ https://issues.apache.org/jira/browse/SPARK-8465?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14620653#comment-14620653 ]
Angel Faus commented on SPARK-8465: ----------------------------------- We are seeing a similar error.. {noformat} import org.apache.spark._ case class CaseClass ( a: String ) object ErrorCase { def generate_entry(x: Int): CaseClass = { return CaseClass("") } def main(args: Array[String]) { val sc = new SparkContext(new SparkConf()); val sqlContext : org.apache.spark.sql.SQLContext = new org.apache.spark.sql.hive.HiveContext(sc); import sqlContext.implicits._; println(sc.parallelize(Array(1)).map(generate_entry).toDF().count); } } {noformat} This will print {noformat} Exception in thread "main" scala.reflect.internal.MissingRequirementError: class CaseClass not found. at scala.reflect.internal.MissingRequirementError$.signal(MissingRequirementError.scala:16) at scala.reflect.internal.MissingRequirementError$.notFound(MissingRequirementError.scala:17) at scala.reflect.internal.Mirrors$RootsBase.ensureClassSymbol(Mirrors.scala:90) at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:119) at scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:21) at ErrorCase$$typecreator1$1.apply(error.scala:16) at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:231) at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:231) at org.apache.spark.sql.catalyst.ScalaReflection$class.localTypeOf(ScalaReflection.scala:71) at org.apache.spark.sql.catalyst.ScalaReflection$class.schemaFor(ScalaReflection.scala:59) at org.apache.spark.sql.catalyst.ScalaReflection$.schemaFor(ScalaReflection.scala:28) at org.apache.spark.sql.SQLContext.createDataFrame(SQLContext.scala:410) at org.apache.spark.sql.SQLContext$implicits$.rddToDataFrameHolder(SQLContext.scala:335) at ErrorCase$.main(error.scala:16) at ErrorCase.main(error.scala) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:606) at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:664) {noformat} This works all right in spark 1.3 > ScalaReflectionException with DataFrames in 1.4 > ----------------------------------------------- > > Key: SPARK-8465 > URL: https://issues.apache.org/jira/browse/SPARK-8465 > Project: Spark > Issue Type: Bug > Components: SQL > Affects Versions: 1.4.0 > Reporter: Chad Urso McDaniel > > We are seeing class exceptions when converting to a DataFrame. > Anyone out there with some suggestions on what is going on? > Our original intention was to use a HiveContext to write ORC and we saw the > error there and have narrowed it down. > This is an example of our code and a commented out line showing basic > non-dataframe usage is ok: > --- > def saveVisitsAsOrcFile(sqlContext: SQLContext, rdd: RDD[Visit], outputDir: > String) { > // works!: println("rdd count: " + rdd.map(_.clicks.size).sum) > import sqlContext.implicits._ > // scala.ScalaReflectionException: class com.rr.data.Visit > print("rdd.toDF.count:" + rdd > .toDF() > .count()) > --- > This runs locally, but when using spark-submit with 1.4 we get: > Exception in thread "main" scala.ScalaReflectionException: class > com.rr.data.Visit in JavaMirror with > sun.misc.Launcher$AppClassLoader@5c647e05 of type class > sun.misc.Launcher$AppClassLoader with classpath > [file:/home/candiru/tewfik/,file:/home/candiru/tewfik/spark-1.4.0-bin-tewfik-spark/conf/,file:/home/candiru/tewfik/spark-1.4.0-bin-tewfik-spark/lib/spark-assembly-1.4.0-hadoop2.0.0-mr1-cdh4.2.0.jar,file:/home/candiru/tewfik/spark-1.4.0-bin-tewfik-spark/lib/datanucleus-api-jdo-3.2.6.jar,file:/home/candiru/tewfik/spark-1.4.0-bin-tewfik-spark/lib/datanucleus-core-3.2.10.jar,file:/home/candiru/tewfik/spark-1.4.0-bin-tewfik-spark/lib/datanucleus-rdbms-3.2.9.jar] > and parent 
being sun.misc.Launcher$ExtClassLoader@1c79d093 of type class > sun.misc.Launcher$ExtClassLoader with classpath > [file:/usr/java/jdk1.8.0_05/jre/lib/ext/cldrdata.jar,file:/usr/java/jdk1.8.0_05/jre/lib/ext/dnsns.jar,file:/usr/java/jdk1.8.0_05/jre/lib/ext/jfxrt.jar,file:/usr/java/jdk1.8.0_05/jre/lib/ext/localedata.jar,file:/usr/java/jdk1.8.0_05/jre/lib/ext/nashorn.jar,file:/usr/java/jdk1.8.0_05/jre/lib/ext/sunec.jar,file:/usr/java/jdk1.8.0_05/jre/lib/ext/sunjce_provider.jar,file:/usr/java/jdk1.8.0_05/jre/lib/ext/sunpkcs11.jar,file:/usr/java/jdk1.8.0_05/jre/lib/ext/zipfs.jar] > and parent being primordial classloader with boot classpath > [/usr/java/jdk1.8.0_05/jre/lib/resources.jar:/usr/java/jdk1.8.0_05/jre/lib/rt.jar:/usr/java/jdk1.8.0_05/jre/lib/sunrsasign.jar:/usr/java/jdk1.8.0_05/jre/lib/jsse.jar:/usr/java/jdk1.8.0_05/jre/lib/jce.jar:/usr/java/jdk1.8.0_05/jre/lib/charsets.jar:/usr/java/jdk1.8.0_05/jre/lib/jfr.jar:/usr/java/jdk1.8.0_05/jre/classes] > not found. > at > scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:123) > at > scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:22) > at > com.rr.data.visits.orc.OrcReadWrite$$typecreator2$1.apply(OrcReadWrite.scala:36) > at > scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:232) > at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:232) > at > org.apache.spark.sql.catalyst.ScalaReflection$class.localTypeOf(ScalaReflection.scala:71) > at > org.apache.spark.sql.catalyst.ScalaReflection$class.schemaFor(ScalaReflection.scala:59) > at > org.apache.spark.sql.catalyst.ScalaReflection$.schemaFor(ScalaReflection.scala:28) > at > org.apache.spark.sql.SQLContext.createDataFrame(SQLContext.scala:410) > at > org.apache.spark.sql.SQLContext$implicits$.rddToDataFrameHolder(SQLContext.scala:335) > at > com.rr.data.visits.orc.OrcReadWrite$.saveVisitsAsOrcFile(OrcReadWrite.scala:36) > at > 
com.rr.data.visits.VisitSequencerRunner$.main(VisitSequencerRunner.scala:43) > at > com.rr.data.visits.VisitSequencerRunner.main(VisitSequencerRunner.scala) > at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) > at > sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) > at > sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) > at java.lang.reflect.Method.invoke(Method.java:483) > at > org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:664) > at > org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:169) > at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:192) > at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:111) > at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) > We're using the normal command line: > --- > bin/spark-submit --properties-file ./spark-submit.conf --class > com.rr.data.visits.VisitSequencerRunner > ./mvt-master-SNAPSHOT-jar-with-dependencies.jar > --- > Our jar contains both com.rr.data.visits.orc.OrcReadWrite (which you can see > in the stack trace) and the unfound com.rr.data.Visit. -- This message was sent by Atlassian JIRA (v6.3.4#6332) --------------------------------------------------------------------- To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org For additional commands, e-mail: issues-h...@spark.apache.org