[ 
https://issues.apache.org/jira/browse/SPARK-14136?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15210841#comment-15210841
 ] 

Qi Dai commented on SPARK-14136:
--------------------------------

The stack trace for pyspark is:

Traceback (most recent call last):
  File "/s/qdai2/spark-2.0.0-SNAPSHOT-bin-hadoop2.7/python/pyspark/shell.py", 
line 38, in <module>
    sc = SparkContext()
  File "/s/qdai2/spark-2.0.0-SNAPSHOT-bin-hadoop2.7/python/pyspark/context.py", 
line 115, in __init__
    conf, jsc, profiler_cls)
  File "/s/qdai2/spark-2.0.0-SNAPSHOT-bin-hadoop2.7/python/pyspark/context.py", 
line 172, in _do_init
    self._jsc = jsc or self._initialize_context(self._conf._jconf)
  File "/s/qdai2/spark-2.0.0-SNAPSHOT-bin-hadoop2.7/python/pyspark/context.py", 
line 235, in _initialize_context
    return self._jvm.JavaSparkContext(jconf)
  File 
"/s/qdai2/spark-2.0.0-SNAPSHOT-bin-hadoop2.7/python/lib/py4j-0.9.2-src.zip/py4j/java_gateway.py",
 line 1086, in __call__
  File 
"/s/qdai2/spark-2.0.0-SNAPSHOT-bin-hadoop2.7/python/lib/py4j-0.9.2-src.zip/py4j/protocol.py",
 line 310, in get_return_value
py4j.protocol.Py4JJavaError: An error occurred while calling 
None.org.apache.spark.api.java.JavaSparkContext.
: java.lang.ClassNotFoundException: 
org.apache.spark.deploy.yarn.history.YarnHistoryService
        at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:348)
        at org.apache.spark.util.Utils$.classForName(Utils.scala:177)
        at 
org.apache.spark.scheduler.cluster.SchedulerExtensionServices$$anonfun$start$5.apply(SchedulerExtensionService.scala:109)
        at 
org.apache.spark.scheduler.cluster.SchedulerExtensionServices$$anonfun$start$5.apply(SchedulerExtensionService.scala:108)
        at 
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
        at 
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
        at scala.collection.mutable.ArraySeq.foreach(ArraySeq.scala:74)
        at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
        at scala.collection.AbstractTraversable.map(Traversable.scala:104)
        at 
org.apache.spark.scheduler.cluster.SchedulerExtensionServices.start(SchedulerExtensionService.scala:108)
        at 
org.apache.spark.scheduler.cluster.YarnSchedulerBackend.start(YarnSchedulerBackend.scala:81)
        at 
org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:62)
        at 
org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:144)
        at org.apache.spark.SparkContext.<init>(SparkContext.scala:501)
        at 
org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:58)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at 
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
        at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:234)
        at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:381)
        at py4j.Gateway.invoke(Gateway.java:241)
        at 
py4j.commands.ConstructorCommand.invokeConstructor(ConstructorCommand.java:79)
        at py4j.commands.ConstructorCommand.execute(ConstructorCommand.java:68)
        at py4j.GatewayConnection.run(GatewayConnection.java:209)
        at java.lang.Thread.run(Thread.java:745)


> Spark 2.0 can't start with yarn mode with ClassNotFoundException: 
> org.apache.spark.deploy.yarn.history.YarnHistoryService
> -------------------------------------------------------------------------------------------------------------------------
>
>                 Key: SPARK-14136
>                 URL: https://issues.apache.org/jira/browse/SPARK-14136
>             Project: Spark
>          Issue Type: Bug
>          Components: PySpark, Spark Shell, YARN
>    Affects Versions: 2.0.0
>         Environment: HortonworksHadoop2.7.1 HDP2.3.2 Java1.8.40
>            Reporter: Qi Dai
>
> For the recent Spark nightly master builds (I tried the current build and 
> many builds from the last couple of weeks), the spark-shell/pyspark can't 
> start in YARN mode with ClassNotFoundException: 
> org.apache.spark.deploy.yarn.history.YarnHistoryService
> The full stack is:
> java.lang.ClassNotFoundException: 
> org.apache.spark.deploy.yarn.history.YarnHistoryService
>   at 
> scala.reflect.internal.util.AbstractFileClassLoader.findClass(AbstractFileClassLoader.scala:62)
>   at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>   at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>   at java.lang.Class.forName0(Native Method)
>   at java.lang.Class.forName(Class.java:348)
>   at org.apache.spark.util.Utils$.classForName(Utils.scala:177)
>   at 
> org.apache.spark.scheduler.cluster.SchedulerExtensionServices$$anonfun$start$5.apply(SchedulerExtensionService.scala:109)
>   at 
> org.apache.spark.scheduler.cluster.SchedulerExtensionServices$$anonfun$start$5.apply(SchedulerExtensionService.scala:108)
>   at 
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
>   at 
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
>   at scala.collection.mutable.ArraySeq.foreach(ArraySeq.scala:74)
>   at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
>   at scala.collection.AbstractTraversable.map(Traversable.scala:104)
>   at 
> org.apache.spark.scheduler.cluster.SchedulerExtensionServices.start(SchedulerExtensionService.scala:108)
>   at 
> org.apache.spark.scheduler.cluster.YarnSchedulerBackend.start(YarnSchedulerBackend.scala:81)
>   at 
> org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:62)
>   at 
> org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:144)
>   at org.apache.spark.SparkContext.<init>(SparkContext.scala:501)
>   at org.apache.spark.repl.Main$.createSparkContext(Main.scala:89)
>   ... 48 elided
> java.lang.NullPointerException
>   at 
> org.apache.spark.sql.SQLContext$.createListenerAndUI(SQLContext.scala:1020)
>   at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:91)
>   at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>   at 
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>   at 
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>   at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
>   at org.apache.spark.repl.Main$.createSQLContext(Main.scala:99)
>   ... 48 elided
> <console>:13: error: not found: value sqlContext
>        import sqlContext.implicits._
>               ^
> <console>:13: error: not found: value sqlContext
>        import sqlContext.sql
>               ^



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org
For additional commands, e-mail: issues-h...@spark.apache.org

Reply via email to