[ 
https://issues.apache.org/jira/browse/SPARK-15872?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Matthew Gwynne updated SPARK-15872:
-----------------------------------
    Comment: was deleted

(was: I have had what appears to be exactly the same issue in 1.6.2. For some 
reason, I don't seem to get this issue running in local mode - only on the 
cluster (which happens to be via yarn-client mode). Is this something anyone 
else has faced? I wasn't sure whether this was some kind of environment issue - 
were you able to reproduce in a clean setting?)

> Dataset of Array of Custom case class throws MissingRequirementError
> --------------------------------------------------------------------
>
>                 Key: SPARK-15872
>                 URL: https://issues.apache.org/jira/browse/SPARK-15872
>             Project: Spark
>          Issue Type: Bug
>    Affects Versions: 1.6.1
>            Reporter: Petr Votava
>            Priority: Minor
>
> example:
> {code:scala}
> import org.apache.spark.SparkContext
> import org.apache.spark.SparkConf
> import org.apache.spark.sql.{SQLContext, Dataset}
> case class Custom(a: String)
> object Main {
>   def main(args: Array[String]) {
>     val conf = new SparkConf()
>         .setAppName("test-spark-bug")
>     val sc = new SparkContext(conf)
>     val sqlContext = new SQLContext(sc)
>     import sqlContext.implicits._
>     val arr: Seq[Custom] = Array(new Custom("a"))
>     val dataset = arr.toDS()
>     dataset.take(1).foreach(println)
>     dataset.map(x => Array(x)).take(1).foreach(println)
>   }
> }
> {code}
> throws:
> Exception in thread "main" scala.reflect.internal.MissingRequirementError: 
> class Custom not found.
>       at 
> scala.reflect.internal.MissingRequirementError$.signal(MissingRequirementError.scala:16)
>       at 
> scala.reflect.internal.MissingRequirementError$.notFound(MissingRequirementError.scala:17)
>       at 
> scala.reflect.internal.Mirrors$RootsBase.ensureClassSymbol(Mirrors.scala:90)
>       at 
> scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:119)
>       at 
> scala.reflect.internal.Mirrors$RootsBase.staticClass(Mirrors.scala:21)
>       at Main$$typecreator2$1.apply(Main.scala:18)
>       at 
> scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:231)
>       at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:231)
>       at 
> org.apache.spark.sql.SQLImplicits$$typecreator19$1.apply(SQLImplicits.scala:126)
>       at 
> scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe$lzycompute(TypeTags.scala:231)
>       at scala.reflect.api.TypeTags$WeakTypeTagImpl.tpe(TypeTags.scala:231)
>       at 
> org.apache.spark.sql.catalyst.encoders.ExpressionEncoder$.apply(ExpressionEncoder.scala:50)
>       at 
> org.apache.spark.sql.SQLImplicits.newProductArrayEncoder(SQLImplicits.scala:126)
>       at Main$.main(Main.scala:18)
>       at Main.main(Main.scala)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:497)
>       at 
> org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
>       at 
> org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
>       at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
>       at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
>       at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> expected output:
> Custom("a")
> Custom("a")



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org
For additional commands, e-mail: issues-h...@spark.apache.org

Reply via email to