[
https://issues.apache.org/jira/browse/SPARK-13710?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15182565#comment-15182565
]
Masayoshi TSUZUKI commented on SPARK-13710:
-------------------------------------------
A similar ERROR message and stack trace also appear when exiting the Spark
shell:
{noformat}
scala> :quit
16/03/07 13:06:31 INFO SparkUI: Stopped Spark web UI at http://192.168.33.129:4040
16/03/07 13:06:31 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
16/03/07 13:06:31 INFO MemoryStore: MemoryStore cleared
16/03/07 13:06:31 INFO BlockManager: BlockManager stopped
16/03/07 13:06:31 INFO BlockManagerMaster: BlockManagerMaster stopped
16/03/07 13:06:31 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
16/03/07 13:06:31 INFO SparkContext: Successfully stopped SparkContext
[WARN] Task failed
java.lang.NoClassDefFoundError: Could not initialize class scala.tools.fusesource_embedded.jansi.internal.Kernel32
        at scala.tools.fusesource_embedded.jansi.internal.WindowsSupport.setConsoleMode(WindowsSupport.java:60)
        at scala.tools.jline_embedded.WindowsTerminal.setConsoleMode(WindowsTerminal.java:208)
        at scala.tools.jline_embedded.WindowsTerminal.restore(WindowsTerminal.java:95)
        at scala.tools.jline_embedded.TerminalSupport$1.run(TerminalSupport.java:52)
        at scala.tools.jline_embedded.internal.ShutdownHooks.runTasks(ShutdownHooks.java:66)
        at scala.tools.jline_embedded.internal.ShutdownHooks.access$000(ShutdownHooks.java:22)
        at scala.tools.jline_embedded.internal.ShutdownHooks$1.run(ShutdownHooks.java:47)
16/03/07 13:06:31 INFO ShutdownHookManager: Shutdown hook called
16/03/07 13:06:31 INFO ShutdownHookManager: Deleting directory C:\Users\tsudukim\AppData\Local\Temp\spark-d9077a51-fc78-4852-ad45-2b7085d72940\repl-f753505f-69cf-4593-bb21-f5aa2683bcca
16/03/07 13:06:31 ERROR ShutdownHookManager: Exception while deleting Spark temp dir: C:\Users\tsudukim\AppData\Local\Temp\spark-d9077a51-fc78-4852-ad45-2b7085d72940\repl-f753505f-69cf-4593-bb21-f5aa2683bcca
java.io.IOException: Failed to delete: C:\Users\tsudukim\AppData\Local\Temp\spark-d9077a51-fc78-4852-ad45-2b7085d72940\repl-f753505f-69cf-4593-bb21-f5aa2683bcca
        at org.apache.spark.util.Utils$.deleteRecursively(Utils.scala:935)
        at org.apache.spark.util.ShutdownHookManager$$anonfun$1$$anonfun$apply$mcV$sp$3.apply(ShutdownHookManager.scala:64)
        at org.apache.spark.util.ShutdownHookManager$$anonfun$1$$anonfun$apply$mcV$sp$3.apply(ShutdownHookManager.scala:61)
        at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
        at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
        at org.apache.spark.util.ShutdownHookManager$$anonfun$1.apply$mcV$sp(ShutdownHookManager.scala:61)
        at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:217)
        at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ShutdownHookManager.scala:189)
        at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:189)
        at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:189)
        at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1788)
        at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply$mcV$sp(ShutdownHookManager.scala:189)
        at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:189)
        at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:189)
        at scala.util.Try$.apply(Try.scala:192)
        at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:189)
        at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:179)
        at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:54)
16/03/07 13:06:31 INFO ShutdownHookManager: Deleting directory C:\Users\tsudukim\AppData\Local\Temp\spark-d9077a51-fc78-4852-ad45-2b7085d72940
16/03/07 13:06:31 ERROR ShutdownHookManager: Exception while deleting Spark temp dir: C:\Users\tsudukim\AppData\Local\Temp\spark-d9077a51-fc78-4852-ad45-2b7085d72940
java.io.IOException: Failed to delete: C:\Users\tsudukim\AppData\Local\Temp\spark-d9077a51-fc78-4852-ad45-2b7085d72940
        at org.apache.spark.util.Utils$.deleteRecursively(Utils.scala:935)
        at org.apache.spark.util.ShutdownHookManager$$anonfun$1$$anonfun$apply$mcV$sp$3.apply(ShutdownHookManager.scala:64)
        at org.apache.spark.util.ShutdownHookManager$$anonfun$1$$anonfun$apply$mcV$sp$3.apply(ShutdownHookManager.scala:61)
        at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
        at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
        at org.apache.spark.util.ShutdownHookManager$$anonfun$1.apply$mcV$sp(ShutdownHookManager.scala:61)
        at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:217)
        at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ShutdownHookManager.scala:189)
        at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:189)
        at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:189)
        at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1788)
        at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply$mcV$sp(ShutdownHookManager.scala:189)
        at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:189)
        at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:189)
        at scala.util.Try$.apply(Try.scala:192)
        at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:189)
        at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:179)
        at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:54)
{noformat}
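
The {{Could not initialize class}} wording here ties this trace to the launch-time one quoted below: the static initializer of the embedded jansi {{Kernel32}} class fails once during {{getConsoleMode}} at startup, the JVM then marks the class as failed, and every later access (here, the shutdown hook's {{setConsoleMode}} at {{:quit}}) gets a bare {{NoClassDefFoundError}} instead of the original error. A minimal sketch of that JVM rule, with made-up names (not the jansi code):
{noformat}
// Sketch of the JVM rule behind the two stack traces (names are illustrative):
// a static initializer that fails once throws ExceptionInInitializerError on
// the first access and NoClassDefFoundError ("Could not initialize class")
// on every access after that, which is why :quit shows the latter.
object StaticInitOnce {
  object Flaky {
    // stands in for the jansi native-library load failing at REPL startup
    val handle: Long = sys.error("native library failed to load")
  }

  def main(args: Array[String]): Unit = {
    try Flaky.handle catch {
      case e: ExceptionInInitializerError => println(s"first access:  $e")
    }
    try Flaky.handle catch {
      case e: NoClassDefFoundError => println(s"second access: $e")
    }
  }
}
{noformat}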
> Spark shell shows ERROR when launching on Windows
> -------------------------------------------------
>
> Key: SPARK-13710
> URL: https://issues.apache.org/jira/browse/SPARK-13710
> Project: Spark
> Issue Type: Bug
> Components: Spark Shell, Windows
> Reporter: Masayoshi TSUZUKI
> Priority: Minor
>
> On Windows, when we launch {{bin\spark-shell.cmd}}, it shows an ERROR message
> and stack trace.
> {noformat}
> C:\Users\tsudukim\Documents\workspace\spark-dev3>bin\spark-shell
> [ERROR] Terminal initialization failed; falling back to unsupported
> java.lang.NoClassDefFoundError: Could not initialize class scala.tools.fusesource_embedded.jansi.internal.Kernel32
>         at scala.tools.fusesource_embedded.jansi.internal.WindowsSupport.getConsoleMode(WindowsSupport.java:50)
>         at scala.tools.jline_embedded.WindowsTerminal.getConsoleMode(WindowsTerminal.java:204)
>         at scala.tools.jline_embedded.WindowsTerminal.init(WindowsTerminal.java:82)
>         at scala.tools.jline_embedded.TerminalFactory.create(TerminalFactory.java:101)
>         at scala.tools.jline_embedded.TerminalFactory.get(TerminalFactory.java:158)
>         at scala.tools.jline_embedded.console.ConsoleReader.<init>(ConsoleReader.java:229)
>         at scala.tools.jline_embedded.console.ConsoleReader.<init>(ConsoleReader.java:221)
>         at scala.tools.jline_embedded.console.ConsoleReader.<init>(ConsoleReader.java:209)
>         at scala.tools.nsc.interpreter.jline_embedded.JLineConsoleReader.<init>(JLineReader.scala:61)
>         at scala.tools.nsc.interpreter.jline_embedded.InteractiveReader.<init>(JLineReader.scala:33)
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>         at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
>         at scala.tools.nsc.interpreter.ILoop$$anonfun$scala$tools$nsc$interpreter$ILoop$$instantiate$1$1.apply(ILoop.scala:865)
>         at scala.tools.nsc.interpreter.ILoop$$anonfun$scala$tools$nsc$interpreter$ILoop$$instantiate$1$1.apply(ILoop.scala:862)
>         at scala.tools.nsc.interpreter.ILoop.scala$tools$nsc$interpreter$ILoop$$mkReader$1(ILoop.scala:871)
>         at scala.tools.nsc.interpreter.ILoop$$anonfun$15$$anonfun$apply$8.apply(ILoop.scala:875)
>         at scala.tools.nsc.interpreter.ILoop$$anonfun$15$$anonfun$apply$8.apply(ILoop.scala:875)
>         at scala.util.Try$.apply(Try.scala:192)
>         at scala.tools.nsc.interpreter.ILoop$$anonfun$15.apply(ILoop.scala:875)
>         at scala.tools.nsc.interpreter.ILoop$$anonfun$15.apply(ILoop.scala:875)
>         at scala.collection.immutable.Stream$$anonfun$map$1.apply(Stream.scala:418)
>         at scala.collection.immutable.Stream$$anonfun$map$1.apply(Stream.scala:418)
>         at scala.collection.immutable.Stream$Cons.tail(Stream.scala:1233)
>         at scala.collection.immutable.Stream$Cons.tail(Stream.scala:1223)
>         at scala.collection.immutable.Stream.collect(Stream.scala:435)
>         at scala.tools.nsc.interpreter.ILoop.chooseReader(ILoop.scala:877)
>         at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1$$anonfun$apply$mcZ$sp$2.apply(ILoop.scala:916)
>         at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:916)
>         at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:911)
>         at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:911)
>         at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
>         at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:911)
>         at org.apache.spark.repl.Main$.doMain(Main.scala:64)
>         at org.apache.spark.repl.Main$.main(Main.scala:47)
>         at org.apache.spark.repl.Main.main(Main.scala)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:497)
>         at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:737)
>         at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:183)
>         at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:208)
>         at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:122)
>         at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
> Setting default log level to "WARN".
> To adjust logging level use sc.setLogLevel(newLevel).
> 16/03/07 13:05:32 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
> Spark context available as sc (master = local[*], app id = local-1457323533704).
> SQL context available as sqlContext.
> Welcome to
>       ____              __
>      / __/__  ___ _____/ /__
>     _\ \/ _ \/ _ `/ __/ '_/
>    /___/ .__/\_,_/_/ /_/\_\   version 2.0.0-SNAPSHOT
>       /_/
> Using Scala version 2.11.7 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_40)
> Type in expressions to have them evaluated.
> Type :help for more information.
> scala> sc.textFile("README.md")
> res0: org.apache.spark.rdd.RDD[String] = README.md MapPartitionsRDD[1] at textFile at <console>:25
> scala> sc.textFile("README.md").count()
> res1: Long = 97
> {noformat}
> Spark shell itself seems to work fine during my simple operation check.
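
As for the {{Failed to delete}} errors at {{:quit}} in my comment above: on Windows, {{java.io.File#delete}} returns {{false}} while any open handle to a file inside the directory remains, and {{Utils.deleteRecursively}} reports that as the {{IOException}} shown. A minimal sketch of that Windows behavior, with made-up paths (not Spark code):
{noformat}
// Sketch of the Windows behavior behind "Failed to delete" (hypothetical
// paths, not Spark code): File#delete returns false while an open handle to
// a file inside the directory remains, so a recursive delete cannot finish.
import java.io.{File, FileOutputStream}

object DeleteWhileOpen {
  def main(args: Array[String]): Unit = {
    val dir = new File(System.getProperty("java.io.tmpdir"), "spark-repro")
    dir.mkdirs()
    val file = new File(dir, "held.tmp")
    val out = new FileOutputStream(file) // handle kept open on purpose
    out.write(1)
    println(s"file deleted: ${file.delete()}") // false on Windows, true on POSIX
    println(s"dir deleted:  ${dir.delete()}")  // false: directory is not empty
    out.close()
  }
}
{noformat}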