Github user rjurney commented on the pull request:

    https://github.com/apache/spark/pull/455#issuecomment-46789138
  
    Thanks. Master doesn't build for me. Is there a particular commit you
    recommend using?
    
    [error]
    
    [error]   last tree to typer:
    Literal(Constant(org.apache.spark.sql.catalyst.types.PrimitiveType))
    
    [error]               symbol: null
    
    [error]    symbol definition: null
    
    [error]                  tpe:
    Class(classOf[org.apache.spark.sql.catalyst.types.PrimitiveType])
    
    [error]        symbol owners:
    
    [error]       context owners: object TestSQLContext -> package test
    
    [error]
    
    [error] == Enclosing template or block ==
    
    [error]
    
    [error] Template( // val <local TestSQLContext>: <notype> in object
    TestSQLContext, tree.tpe=org.apache.spark.sql.test.TestSQLContext.type
    
    [error]   "org.apache.spark.sql.SQLContext" // parents
    
    [error]   ValDef(
    
    [error]     private
    
    [error]     "_"
    
    [error]     <tpt>
    
    [error]     <empty>
    
    [error]   )
    
    [error]   // 2 statements
    
    [error]   DefDef( // private def readResolve(): Object in object
    TestSQLContext
    
    [error]     <method> private <synthetic>
    
    [error]     "readResolve"
    
    [error]     []
    
    [error]     List(Nil)
    
    [error]     <tpt> // tree.tpe=Object
    
    [error]     test.this."TestSQLContext" // object TestSQLContext in package
    test, tree.tpe=org.apache.spark.sql.test.TestSQLContext.type
    
    [error]   )
    
    [error]   DefDef( // def <init>():
    org.apache.spark.sql.test.TestSQLContext.type in object TestSQLContext
    
    [error]     <method>
    
    [error]     "<init>"
    
    [error]     []
    
    [error]     List(Nil)
    
    [error]     <tpt> // tree.tpe=org.apache.spark.sql.test.TestSQLContext.type
    
    [error]     Block( // tree.tpe=Unit
    
    [error]       Apply( // def <init>(sparkContext:
    org.apache.spark.SparkContext): org.apache.spark.sql.SQLContext in class
    SQLContext, tree.tpe=org.apache.spark.sql.SQLContext
    
    [error]         TestSQLContext.super."<init>" // def <init>(sparkContext:
    org.apache.spark.SparkContext): org.apache.spark.sql.SQLContext in class
    SQLContext, tree.tpe=(sparkContext:
    org.apache.spark.SparkContext)org.apache.spark.sql.SQLContext
    
    [error]         Apply( // def <init>(master: String,appName: String,conf:
    org.apache.spark.SparkConf): org.apache.spark.SparkContext in class
    SparkContext, tree.tpe=org.apache.spark.SparkContext
    
    [error]           new org.apache.spark.SparkContext."<init>" // def
    <init>(master: String,appName: String,conf: org.apache.spark.SparkConf):
    org.apache.spark.SparkContext in class SparkContext, tree.tpe=(master:
    String, appName: String, conf:
    org.apache.spark.SparkConf)org.apache.spark.SparkContext
    
    [error]           // 3 arguments
    
    [error]           "local"
    
    [error]           "TestSQLContext"
    
    [error]           Apply( // def <init>(): org.apache.spark.SparkConf in
    class SparkConf, tree.tpe=org.apache.spark.SparkConf
    
    [error]             new org.apache.spark.SparkConf."<init>" // def
    <init>(): org.apache.spark.SparkConf in class SparkConf,
    tree.tpe=()org.apache.spark.SparkConf
    
    [error]             Nil
    
    [error]           )
    
    [error]         )
    
    [error]       )
    
    [error]       ()
    
    [error]     )
    
    [error]   )
    
    [error] )
    
    [error]
    
    [error] == Expanded type of tree ==
    
    [error]
    
    [error] ConstantType(
    
    [error]   value =
    Constant(org.apache.spark.sql.catalyst.types.PrimitiveType)
    
    [error] )
    
    [error]
    
    [error] uncaught exception during compilation: java.lang.AssertionError
    
    java.lang.AssertionError: assertion failed: List(object package$DebugNode,
    object package$DebugNode)
    
    at scala.reflect.internal.Symbols$Symbol.suchThat(Symbols.scala:1678)
    
    at
    
scala.reflect.internal.Symbols$ClassSymbol.companionModule0(Symbols.scala:2988)
    
    at
    
scala.reflect.internal.Symbols$ClassSymbol.companionModule(Symbols.scala:2991)
    
    at
    scala.tools.nsc.backend.jvm.GenASM$JPlainBuilder.genClass(GenASM.scala:1371)
    
    at scala.tools.nsc.backend.jvm.GenASM$AsmPhase.run(GenASM.scala:120)
    
    at scala.tools.nsc.Global$Run.compileUnitsInternal(Global.scala:1583)
    
    at scala.tools.nsc.Global$Run.compileUnits(Global.scala:1557)
    
    at scala.tools.nsc.Global$Run.compileSources(Global.scala:1553)
    
    at scala.tools.nsc.Global$Run.compile(Global.scala:1662)
    
    at xsbt.CachedCompiler0.run(CompilerInterface.scala:123)
    
    at xsbt.CachedCompiler0.run(CompilerInterface.scala:99)
    
    at xsbt.CompilerInterface.run(CompilerInterface.scala:27)
    
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    
    at
    
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    
    at
    
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    
    at java.lang.reflect.Method.invoke(Method.java:606)
    
    at sbt.compiler.AnalyzingCompiler.call(AnalyzingCompiler.scala:102)
    
    at sbt.compiler.AnalyzingCompiler.compile(AnalyzingCompiler.scala:48)
    
    at sbt.compiler.AnalyzingCompiler.compile(AnalyzingCompiler.scala:41)
    
    at
    
sbt.compiler.AggressiveCompile$$anonfun$3$$anonfun$compileScala$1$1.apply$mcV$sp(AggressiveCompile.scala:99)
    
    at
    
sbt.compiler.AggressiveCompile$$anonfun$3$$anonfun$compileScala$1$1.apply(AggressiveCompile.scala:99)
    
    at
    
sbt.compiler.AggressiveCompile$$anonfun$3$$anonfun$compileScala$1$1.apply(AggressiveCompile.scala:99)
    
    at
    
sbt.compiler.AggressiveCompile.sbt$compiler$AggressiveCompile$$timed(AggressiveCompile.scala:166)
    
    at
    
sbt.compiler.AggressiveCompile$$anonfun$3.compileScala$1(AggressiveCompile.scala:98)
    
    at
    sbt.compiler.AggressiveCompile$$anonfun$3.apply(AggressiveCompile.scala:143)
    
    at
    sbt.compiler.AggressiveCompile$$anonfun$3.apply(AggressiveCompile.scala:87)
    
    at sbt.inc.IncrementalCompile$$anonfun$doCompile$1.apply(Compile.scala:39)
    
    at sbt.inc.IncrementalCompile$$anonfun$doCompile$1.apply(Compile.scala:37)
    
    at sbt.inc.IncrementalCommon.cycle(Incremental.scala:99)
    
    at sbt.inc.Incremental$$anonfun$1.apply(Incremental.scala:38)
    
    at sbt.inc.Incremental$$anonfun$1.apply(Incremental.scala:37)
    
    at sbt.inc.Incremental$.manageClassfiles(Incremental.scala:65)
    
    at sbt.inc.Incremental$.compile(Incremental.scala:37)
    
    at sbt.inc.IncrementalCompile$.apply(Compile.scala:27)
    
    at sbt.compiler.AggressiveCompile.compile2(AggressiveCompile.scala:157)
    
    at sbt.compiler.AggressiveCompile.compile1(AggressiveCompile.scala:71)
    
    at sbt.compiler.AggressiveCompile.apply(AggressiveCompile.scala:46)
    
    at sbt.Compiler$.apply(Compiler.scala:75)
    
    at sbt.Compiler$.apply(Compiler.scala:66)
    
    at sbt.Defaults$.sbt$Defaults$$compileTaskImpl(Defaults.scala:743)
    
    at sbt.Defaults$$anonfun$compileTask$1.apply(Defaults.scala:735)
    
    at sbt.Defaults$$anonfun$compileTask$1.apply(Defaults.scala:735)
    
    at scala.Function1$$anonfun$compose$1.apply(Function1.scala:47)
    
    at sbt.$tilde$greater$$anonfun$$u2219$1.apply(TypeFunctions.scala:42)
    
    at sbt.std.Transform$$anon$4.work(System.scala:64)
    
    at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
    
    at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
    
    at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
    
    at sbt.Execute.work(Execute.scala:244)
    
    at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
    
    at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
    
    at
    
sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
    
    at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
    
    at java.util.concurrent.FutureTask.run(FutureTask.java:262)
    
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
    
    at java.util.concurrent.FutureTask.run(FutureTask.java:262)
    
    at
    
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    
    at
    
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    
    at java.lang.Thread.run(Thread.java:724)
    
    [error] (sql/compile:compile) java.lang.AssertionError: assertion failed:
    List(object package$DebugNode, object package$DebugNode)
    
    [error] Total time: 55 s, completed Jun 22, 2014 11:45:36 AM
    
    
    On Sat, Jun 21, 2014 at 11:17 PM, MLnick <[email protected]> wrote:
    
    > 1.1 is not released yet. This PR is in master but not in 1.0 (it may be
    > released in 1.0.1 or if not then 1.1).
    >
    >
    > So you'll have to clone master and run sbt/sbt publish-local, which will
    > publish the Maven and sbt artifacts to your local repos.
    >
    >
    >
    > —
    > Sent from Mailbox
    >
    > On Sun, Jun 22, 2014 at 1:22 AM, Russell Jurney <[email protected]>
    >
    > wrote:
    >
    > > Thanks a ton! One thing - how can I pull spark core 1.1 from maven?
    > > [ERROR] Failed to execute goal on project avro: Could not resolve
    > > dependencies for project example:avro:jar:0.1: Could not find artifact
    > > org.apache.spark:spark-core_2.10:jar:1.1.0-SNAPSHOT in scala-tools.org
    > (
    > > http://scala-tools.org/repo-releases) -> [Help 1]
    > > On Fri, Jun 20, 2014 at 10:45 PM, MLnick <[email protected]>
    > wrote:
    > >> @rjurney <https://github.com/rjurney> this works for me (building
    > Spark
    > >> from current master):
    > https://gist.github.com/MLnick/5864741781b9340cb211
    > >>
    > >> if you run mvn package and then add that to SPARK_CLASSPATH and use it
    > in
    > >> IPython console.
    > >>
    > >> However it seems to come through as only strings (not a dict). I
    > verified
    > >> that if I take only the string field and explicitly convert to string
    > (ie Map[String,
    > >> String]) then it works. I suspect then that Avro doesn't have the type
    > >> information at all, so Pyrolite cannot pickle it. I guess you might
    > have to
    > >> do something more in depth in the AvroConverter to read the type info
    > >> from the Avro schema and do a cast...
    > >>
    > >> —
    > >> Reply to this email directly or view it on GitHub
    > >> <https://github.com/apache/spark/pull/455#issuecomment-46745394>.
    > >>
    > > --
    > > Russell Jurney twitter.com/rjurney [email protected]
    > datasyndrome.com
    > > ---
    > > Reply to this email directly or view it on GitHub:
    > > https://github.com/apache/spark/pull/455#issuecomment-46767642
    >
    > —
    > Reply to this email directly or view it on GitHub
    > <https://github.com/apache/spark/pull/455#issuecomment-46773577>.
    >
    
    
    
    -- 
    Russell Jurney twitter.com/rjurney [email protected] datasyndrome.com


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

Reply via email to