getting exception when trying to build spark from master

2014-11-10 Thread Sadhan Sood
Getting an exception while trying to build spark in spark-core:

[ERROR]

 while compiling:
/Users/dev/tellapart_spark/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala

during phase: typer

 library version: version 2.10.4

compiler version: version 2.10.4

  reconstructed args: -deprecation -feature -classpath


  last tree to typer: Ident(enumDispatcher)

  symbol: value enumDispatcher (flags: triedcooking)

   symbol definition: val enumDispatcher:
java.util.EnumSet[javax.servlet.DispatcherType]

 tpe: java.util.EnumSet[javax.servlet.DispatcherType]

   symbol owners: value enumDispatcher - value $anonfun - method
addFilters - object JettyUtils - package ui

  context owners: value $anonfun - value $anonfun - method addFilters
- object JettyUtils - package ui


== Enclosing template or block ==


Block(

  ValDef( // val filters: Array[String]

triedcooking

filters

AppliedTypeTree(

  Array

  String

)

Apply(

  conf.get(spark.ui.filters, ).split(',').map

  Function( // val $anonfun: notype, tree.tpe=String => String

ValDef( // x$1: String

  param synthetic triedcooking

  x$1

  tpt // tree.tpe=String

  empty

)

Apply( // def trim(): String in class String, tree.tpe=String

  x$1.trim // def trim(): String in class String,
tree.tpe=()String

  Nil

)

  )

)

  )

  Apply(

filters.foreach

Match(

  empty

  CaseDef(

Bind( // val filter: String

  filter

  Typed(

_ // tree.tpe=String

String

  )

)

If(

  filter.isEmpty.unary_$bang

  Block(

// 7 statements

Apply(

  logInfo

  Apply( // final def +(x$1: Any): String in class String,
tree.tpe=String

Adding filter: .$plus // final def +(x$1: Any): String
in class String, tree.tpe=(x$1: Any)String

filter // val filter: String, tree.tpe=String

  )

)

ValDef( // val holder: org.eclipse.jetty.servlet.FilterHolder

  triedcooking

  holder

  FilterHolder

  Apply(

new FilterHolder.init

Nil

  )

)

Apply( // def setClassName(x$1: String): Unit in class Holder,
tree.tpe=Unit

  holder.setClassName // def setClassName(x$1: String):
Unit in class Holder, tree.tpe=(x$1: String)Unit

  filter // val filter: String, tree.tpe=String

)

Apply(

  conf.get(spark..+(filter).+(.params),
).split(',').map(((x$2: String) => x$2.trim())).toSet.foreach

  Function( // val $anonfun: notype

ValDef( // param: String

  param triedcooking

  param

  String

  empty

)

If(

  param.isEmpty.unary_$bang

  Block(

ValDef( // val parts: Array[String]

  triedcooking

  parts

  tpt // tree.tpe=Array[String]

  Apply( // def split(x$1: String): Array[String] in
class String, tree.tpe=Array[String]

param.split // def split(x$1: String):
Array[String] in class String, tree.tpe=(x$1: String)Array[String]

=

  )

)

If(

  Apply( // def ==(x: Int): Boolean in class Int,
tree.tpe=Boolean

parts.length.$eq$eq // def ==(x: Int):
Boolean in class Int, tree.tpe=(x: Int)Boolean

2

  )

  Apply( // def setInitParameter(x$1: String,x$2:
String): Unit in class Holder

holder.setInitParameter // def
setInitParameter(x$1: String,x$2: String): Unit in class Holder,
tree.tpe=(x$1: String, x$2: String)Unit

// 2 arguments

Apply( // val parts: Array[String]

  parts // val parts: Array[String],
tree.tpe=parts.type

  0

)

Apply( // val parts: Array[String]

  parts // val parts: Array[String],
tree.tpe=parts.type

  1

)

  )

  ()

)

  )

  ()

)

  )

)

ValDef( // val prefix: String

  triedcooking

  prefix

  tpt // tree.tpe=String

  Apply(

StringContext(spark., .param.).s

  

Re: getting exception when trying to build spark from master

2014-11-10 Thread Josh Rosen
It looks like the Jenkins maven builds are broken, too.  Based on the
Jenkins logs, I think that this pull request may have broken things
(although I'm not sure why):

https://github.com/apache/spark/pull/3030#issuecomment-62436181

On Mon, Nov 10, 2014 at 1:42 PM, Sadhan Sood sadhan.s...@gmail.com wrote:

 Getting an exception while trying to build spark in spark-core:

 [ERROR]

  while compiling:

 /Users/dev/tellapart_spark/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala

 during phase: typer

  library version: version 2.10.4

 compiler version: version 2.10.4

   reconstructed args: -deprecation -feature -classpath


   last tree to typer: Ident(enumDispatcher)

   symbol: value enumDispatcher (flags: triedcooking)

symbol definition: val enumDispatcher:
 java.util.EnumSet[javax.servlet.DispatcherType]

  tpe: java.util.EnumSet[javax.servlet.DispatcherType]

symbol owners: value enumDispatcher - value $anonfun - method
 addFilters - object JettyUtils - package ui

   context owners: value $anonfun - value $anonfun - method addFilters
 - object JettyUtils - package ui


 == Enclosing template or block ==


 Block(

   ValDef( // val filters: Array[String]

 triedcooking

 filters

 AppliedTypeTree(

   Array

   String

 )

 Apply(

   conf.get(spark.ui.filters, ).split(',').map

    Function( // val $anonfun: notype, tree.tpe=String => String

 ValDef( // x$1: String

   param synthetic triedcooking

   x$1

   tpt // tree.tpe=String

   empty

 )

 Apply( // def trim(): String in class String, tree.tpe=String

   x$1.trim // def trim(): String in class String,
 tree.tpe=()String

   Nil

 )

   )

 )

   )

   Apply(

 filters.foreach

 Match(

   empty

   CaseDef(

 Bind( // val filter: String

   filter

   Typed(

 _ // tree.tpe=String

 String

   )

 )

 If(

   filter.isEmpty.unary_$bang

   Block(

 // 7 statements

 Apply(

   logInfo

   Apply( // final def +(x$1: Any): String in class String,
 tree.tpe=String

 Adding filter: .$plus // final def +(x$1: Any): String
 in class String, tree.tpe=(x$1: Any)String

 filter // val filter: String, tree.tpe=String

   )

 )

 ValDef( // val holder: org.eclipse.jetty.servlet.FilterHolder

   triedcooking

   holder

   FilterHolder

   Apply(

 new FilterHolder.init

 Nil

   )

 )

 Apply( // def setClassName(x$1: String): Unit in class Holder,
 tree.tpe=Unit

   holder.setClassName // def setClassName(x$1: String):
 Unit in class Holder, tree.tpe=(x$1: String)Unit

   filter // val filter: String, tree.tpe=String

 )

 Apply(

    conf.get(spark..+(filter).+(.params),
  ).split(',').map(((x$2: String) => x$2.trim())).toSet.foreach

   Function( // val $anonfun: notype

 ValDef( // param: String

   param triedcooking

   param

   String

   empty

 )

 If(

   param.isEmpty.unary_$bang

   Block(

 ValDef( // val parts: Array[String]

   triedcooking

   parts

   tpt // tree.tpe=Array[String]

   Apply( // def split(x$1: String): Array[String] in
 class String, tree.tpe=Array[String]

 param.split // def split(x$1: String):
 Array[String] in class String, tree.tpe=(x$1: String)Array[String]

 =

   )

 )

 If(

   Apply( // def ==(x: Int): Boolean in class Int,
 tree.tpe=Boolean

 parts.length.$eq$eq // def ==(x: Int):
 Boolean in class Int, tree.tpe=(x: Int)Boolean

 2

   )

   Apply( // def setInitParameter(x$1: String,x$2:
 String): Unit in class Holder

 holder.setInitParameter // def
 setInitParameter(x$1: String,x$2: String): Unit in class Holder,
 tree.tpe=(x$1: String, x$2: String)Unit

 // 2 arguments

 Apply( // val parts: Array[String]

   parts // val parts: Array[String],
 tree.tpe=parts.type

   0

 )

 Apply( // val parts: Array[String]

   parts // val parts: Array[String],
 tree.tpe=parts.type


Re: getting exception when trying to build spark from master

2014-11-10 Thread Sadhan Sood
I reverted the patch locally, seems to be working for me.

On Mon, Nov 10, 2014 at 6:00 PM, Patrick Wendell pwend...@gmail.com wrote:

 I reverted that patch to see if it fixes it.

 On Mon, Nov 10, 2014 at 1:45 PM, Josh Rosen rosenvi...@gmail.com wrote:
  It looks like the Jenkins maven builds are broken, too.  Based on the
  Jenkins logs, I think that this pull request may have broken things
  (although I'm not sure why):
 
  https://github.com/apache/spark/pull/3030#issuecomment-62436181
 
  On Mon, Nov 10, 2014 at 1:42 PM, Sadhan Sood sadhan.s...@gmail.com
 wrote:
 
  Getting an exception while trying to build spark in spark-core:
 
  [ERROR]
 
   while compiling:
 
 
 /Users/dev/tellapart_spark/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
 
  during phase: typer
 
   library version: version 2.10.4
 
  compiler version: version 2.10.4
 
reconstructed args: -deprecation -feature -classpath
 
 
last tree to typer: Ident(enumDispatcher)
 
symbol: value enumDispatcher (flags: triedcooking)
 
 symbol definition: val enumDispatcher:
  java.util.EnumSet[javax.servlet.DispatcherType]
 
   tpe: java.util.EnumSet[javax.servlet.DispatcherType]
 
 symbol owners: value enumDispatcher - value $anonfun - method
  addFilters - object JettyUtils - package ui
 
context owners: value $anonfun - value $anonfun - method
 addFilters
  - object JettyUtils - package ui
 
 
  == Enclosing template or block ==
 
 
  Block(
 
ValDef( // val filters: Array[String]
 
  triedcooking
 
  filters
 
  AppliedTypeTree(
 
Array
 
String
 
  )
 
  Apply(
 
conf.get(spark.ui.filters, ).split(',').map
 
 Function( // val $anonfun: notype, tree.tpe=String => String
 
  ValDef( // x$1: String
 
param synthetic triedcooking
 
x$1
 
tpt // tree.tpe=String
 
empty
 
  )
 
  Apply( // def trim(): String in class String, tree.tpe=String
 
x$1.trim // def trim(): String in class String,
  tree.tpe=()String
 
Nil
 
  )
 
)
 
  )
 
)
 
Apply(
 
  filters.foreach
 
  Match(
 
empty
 
CaseDef(
 
  Bind( // val filter: String
 
filter
 
Typed(
 
  _ // tree.tpe=String
 
  String
 
)
 
  )
 
  If(
 
filter.isEmpty.unary_$bang
 
Block(
 
  // 7 statements
 
  Apply(
 
logInfo
 
Apply( // final def +(x$1: Any): String in class String,
  tree.tpe=String
 
  Adding filter: .$plus // final def +(x$1: Any):
 String
  in class String, tree.tpe=(x$1: Any)String
 
  filter // val filter: String, tree.tpe=String
 
)
 
  )
 
  ValDef( // val holder:
 org.eclipse.jetty.servlet.FilterHolder
 
triedcooking
 
holder
 
FilterHolder
 
Apply(
 
  new FilterHolder.init
 
  Nil
 
)
 
  )
 
  Apply( // def setClassName(x$1: String): Unit in class
 Holder,
  tree.tpe=Unit
 
holder.setClassName // def setClassName(x$1: String):
  Unit in class Holder, tree.tpe=(x$1: String)Unit
 
filter // val filter: String, tree.tpe=String
 
  )
 
  Apply(
 
 conf.get(spark..+(filter).+(.params),
  ).split(',').map(((x$2: String) => x$2.trim())).toSet.foreach
 
Function( // val $anonfun: notype
 
  ValDef( // param: String
 
param triedcooking
 
param
 
String
 
empty
 
  )
 
  If(
 
param.isEmpty.unary_$bang
 
Block(
 
  ValDef( // val parts: Array[String]
 
triedcooking
 
parts
 
tpt // tree.tpe=Array[String]
 
Apply( // def split(x$1: String): Array[String] in
  class String, tree.tpe=Array[String]
 
  param.split // def split(x$1: String):
  Array[String] in class String, tree.tpe=(x$1: String)Array[String]
 
  =
 
)
 
  )
 
  If(
 
Apply( // def ==(x: Int): Boolean in class Int,
  tree.tpe=Boolean
 
  parts.length.$eq$eq // def ==(x: Int):
  Boolean in class Int, tree.tpe=(x: Int)Boolean
 
  2
 
)
 
Apply( // def setInitParameter(x$1: String,x$2:
  String): Unit in class Holder
 
  holder.setInitParameter // def