[
https://issues.apache.org/jira/browse/TOREE-528?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17357584#comment-17357584
]
Chris Van Houtte commented on TOREE-528:
----------------------------------------
I see the same error with Scala 2.12.2.
With spark-2.4.8-bin-hadoop2.7 and Scala 2.11.12 I get a different error instead:
```
[init] error: error while loading Object, Missing dependency 'object
scala.reflect.ScalaSignature in compiler mirror', required by
/usr/lib/jvm/java-11-openjdk-amd64(java/lang/Object.class)
Failed to initialize compiler: object scala.runtime in compiler mirror not
found.
** Note that as of 2.8 scala does not assume use of the java classpath.
** For the old behavior pass -usejavacp to scala, or if using a Settings
** object programmatically, settings.usejavacp.value = true.
Failed to initialize compiler: object scala.runtime in compiler mirror not
found.
** Note that as of 2.8 scala does not assume use of the java classpath.
** For the old behavior pass -usejavacp to scala, or if using a Settings
** object programmatically, settings.usejavacp.value = true.
Exception in thread "main" java.lang.NullPointerException
at scala.reflect.internal.SymbolTable.exitingPhase(SymbolTable.scala:256)
at scala.tools.nsc.interpreter.IMain$Request.x$20$lzycompute(IMain.scala:903)
at scala.tools.nsc.interpreter.IMain$Request.x$20(IMain.scala:902)
at
scala.tools.nsc.interpreter.IMain$Request.headerPreamble$lzycompute(IMain.scala:902)
at scala.tools.nsc.interpreter.IMain$Request.headerPreamble(IMain.scala:902)
at scala.tools.nsc.interpreter.IMain$Request$Wrapper.preamble(IMain.scala:925)
at
scala.tools.nsc.interpreter.IMain$CodeAssembler$$anonfun$apply$23.apply(IMain.scala:1344)
at
scala.tools.nsc.interpreter.IMain$CodeAssembler$$anonfun$apply$23.apply(IMain.scala:1343)
at scala.tools.nsc.util.package$.stringFromWriter(package.scala:64)
at
scala.tools.nsc.interpreter.IMain$CodeAssembler$class.apply(IMain.scala:1343)
at scala.tools.nsc.interpreter.IMain$Request$Wrapper.apply(IMain.scala:915)
at
scala.tools.nsc.interpreter.IMain$Request.compile$lzycompute(IMain.scala:1009)
at scala.tools.nsc.interpreter.IMain$Request.compile(IMain.scala:1004)
at scala.tools.nsc.interpreter.IMain.compile(IMain.scala:586)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:574)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:572)
at
org.apache.toree.kernel.interpreter.scala.ScalaInterpreterSpecific$$anonfun$start$1.apply(ScalaInterpreterSpecific.scala:292)
at
org.apache.toree.kernel.interpreter.scala.ScalaInterpreterSpecific$$anonfun$start$1.apply(ScalaInterpreterSpecific.scala:286)
at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)
at
org.apache.toree.kernel.interpreter.scala.ScalaInterpreterSpecific$class.start(ScalaInterpreterSpecific.scala:286)
at
org.apache.toree.kernel.interpreter.scala.ScalaInterpreter.start(ScalaInterpreter.scala:43)
at
org.apache.toree.kernel.interpreter.scala.ScalaInterpreter.init(ScalaInterpreter.scala:94)
at
org.apache.toree.boot.layer.InterpreterManager$$anonfun$initializeInterpreters$1.apply(InterpreterManager.scala:35)
at
org.apache.toree.boot.layer.InterpreterManager$$anonfun$initializeInterpreters$1.apply(InterpreterManager.scala:34)
at scala.collection.Iterator$class.foreach(Iterator.scala:891)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
at scala.collection.MapLike$DefaultValuesIterable.foreach(MapLike.scala:206)
at
org.apache.toree.boot.layer.InterpreterManager.initializeInterpreters(InterpreterManager.scala:34)
at
org.apache.toree.boot.layer.StandardComponentInitialization$class.initializeComponents(ComponentInitialization.scala:87)
at org.apache.toree.Main$$anon$1.initializeComponents(Main.scala:35)
at org.apache.toree.boot.KernelBootstrap.initialize(KernelBootstrap.scala:100)
at
org.apache.toree.Main$.delayedEndpoint$org$apache$toree$Main$1(Main.scala:40)
at org.apache.toree.Main$delayedInit$body.apply(Main.scala:24)
at scala.Function0$class.apply$mcV$sp(Function0.scala:34)
at scala.runtime.AbstractFunction0.apply$mcV$sp(AbstractFunction0.scala:12)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.App$$anonfun$main$1.apply(App.scala:76)
at scala.collection.immutable.List.foreach(List.scala:392)
at
scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:35)
at scala.App$class.main(App.scala:76)
at org.apache.toree.Main$.main(Main.scala:24)
at org.apache.toree.Main.main(Main.scala)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native
Method)
at
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at
org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:855)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:161)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:184)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:930)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:939)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
```
> Unable to start kernel
> ----------------------
>
> Key: TOREE-528
> URL: https://issues.apache.org/jira/browse/TOREE-528
> Project: TOREE
> Issue Type: Bug
> Components: Kernel
> Affects Versions: 0.4.0
> Environment: Debian 10
> Reporter: Chris Van Houtte
> Priority: Major
>
> Hi there
> I cannot start the kernel on Debian 10
> $ scala -version
> Scala code runner version 2.11.12 -- Copyright 2002-2017, LAMP/EPFL
> Followed the instructions here, in an anaconda environment
> [https://toree.apache.org/docs/current/user/installation/]
> The only deviation being adding the --user argument as follows
> ```
> $ jupyter toree install --spark_home=/opt/spark-3.1.1-bin-hadoop3.2/ --user
> ```
> Spark itself works fine via the terminal
>
> Error message:
> Starting Spark Kernel with SPARK_HOME=/opt/spark-3.1.1-bin-hadoop3.2/
> 21/06/04 11:46:08 WARN Utils: Your hostname, debian-ds resolves to a
> loopback address: 127.0.1.1; using 10.0.6.51 instead (on interface eno1)
> 21/06/04 11:46:08 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to
> another address
> WARNING: An illegal reflective access operation has occurred
> WARNING: Illegal reflective access by org.apache.spark.unsafe.Platform
> (file:/opt/spark-3.1.1-bin-hadoop3.2/jars/spark-unsafe_2.12-3.1.1.jar)
> to constructor java.nio.DirectByteBuffer(long,int)
> WARNING: Please consider reporting this to the maintainers of
> org.apache.spark.unsafe.Platform
> WARNING: Use --illegal-access=warn to enable warnings of further illegal
> reflective access operations
> WARNING: All illegal access operations will be denied in a future release
> 21/06/04 11:46:08 WARN NativeCodeLoader: Unable to load native-hadoop
> library for your platform... using builtin-java classes where applicable
> Exception in thread "main" java.lang.NoClassDefFoundError: scala/App$class
> at org.apache.toree.Main$.<init>(Main.scala:24)
> at org.apache.toree.Main$.<clinit>(Main.scala)
> at org.apache.toree.Main.main(Main.scala)
> at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native
> Method)
> at
> java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> at
> java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> at java.base/java.lang.reflect.Method.invoke(Method.java:566)
> at
> org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
> at
> org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:951)
> at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
> at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
> at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
> at
> org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1030)
> at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1039)
> at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> Caused by: java.lang.ClassNotFoundException: scala.App$class
> at java.base/java.net.URLClassLoader.findClass(URLClassLoader.java:471)
> at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:589)
> at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:522)
--
This message was sent by Atlassian Jira
(v8.3.4#803005)