[ https://issues.apache.org/jira/browse/SPARK-35902?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Apache Spark reassigned SPARK-35902:
------------------------------------

    Assignee:     (was: Apache Spark)

> spark.driver.log.dfsDir with hdfs scheme failed
> -----------------------------------------------
>
>                 Key: SPARK-35902
>                 URL: https://issues.apache.org/jira/browse/SPARK-35902
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Core
>    Affects Versions: 3.1.0, 3.1.1, 3.1.2
>            Environment: Spark 3.1.1, Hadoop 3.1.1
>            Reporter: YuanGuanhu
>            Priority: Major
>
> When I set spark.driver.log.dfsDir to a path with an hdfs:// scheme,
> driver log persistence fails with an exception. Repro configuration:
> spark.driver.log.persistToDfs.enabled = true
> spark.driver.log.dfsDir = hdfs://hacluster/spark2xdriverlogs1
>  
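> The mangled pathname in the error below ("hdfs://" collapsed to "hdfs:/"
> with the local working directory prepended) is characteristic of
> java.io.File resolving the URI string as a relative local path, and the
> stack frames point at DriverLogger$DfsAsyncWriter.init building the log
> file path. A minimal standalone sketch of that failure mode next to the
> Hadoop Path behavior that preserves the scheme (illustrative only, not
> the actual DriverLogger code; the app id and file name are made up):
>
>   import java.io.File
>   import org.apache.hadoop.fs.Path
>
>   object PathManglingSketch {
>     def main(args: Array[String]): Unit = {
>       val dfsDir = "hdfs://hacluster/spark2xdriverlogs1"
>
>       // java.io.File treats the URI as a relative local path: it collapses
>       // "//" to "/", and getAbsolutePath prepends the working directory, e.g.
>       // /opt/client811/Spark2x/spark/hdfs:/hacluster/spark2xdriverlogs1/app-123_driver.log
>       println(new File(dfsDir, "app-123_driver.log").getAbsolutePath)
>
>       // Hadoop's Path parses the scheme and authority, so the URI survives:
>       // hdfs://hacluster/spark2xdriverlogs1/app-123_driver.log
>       println(new Path(dfsDir, "app-123_driver.log"))
>     }
>   }
>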
> 2021-06-25 14:56:45,786 | ERROR | main | Could not persist driver logs to dfs | org.apache.spark.util.logging.DriverLogger.logError(Logging.scala:94)
>  java.lang.IllegalArgumentException: Pathname /opt/client811/Spark2x/spark/hdfs:/hacluster/spark2xdriverlogs1 from /opt/client811/Spark2x/spark/hdfs:/hacluster/spark2xdriverlogs1 is not a valid DFS filename.
>  at org.apache.hadoop.hdfs.DistributedFileSystem.getPathName(DistributedFileSystem.java:252)
>  at org.apache.hadoop.hdfs.DistributedFileSystem$27.doCall(DistributedFileSystem.java:1375)
>  at org.apache.hadoop.hdfs.DistributedFileSystem$27.doCall(DistributedFileSystem.java:1372)
>  at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
>  at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:1389)
>  at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:1364)
>  at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:2410)
>  at org.apache.spark.deploy.SparkHadoopUtil$.createFile(SparkHadoopUtil.scala:528)
>  at org.apache.spark.util.logging.DriverLogger$DfsAsyncWriter.init(DriverLogger.scala:118)
>  at org.apache.spark.util.logging.DriverLogger$DfsAsyncWriter.<init>(DriverLogger.scala:104)
>  at org.apache.spark.util.logging.DriverLogger.startSync(DriverLogger.scala:72)
>  at org.apache.spark.SparkContext.$anonfun$postApplicationStart$1(SparkContext.scala:2688)
>  at org.apache.spark.SparkContext.$anonfun$postApplicationStart$1$adapted(SparkContext.scala:2688)
>  at scala.Option.foreach(Option.scala:407)
>  at org.apache.spark.SparkContext.postApplicationStart(SparkContext.scala:2688)
>  at org.apache.spark.SparkContext.<init>(SparkContext.scala:640)
>  at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2814)
>  at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:947)
>  at scala.Option.getOrElse(Option.scala:189)
>  at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:941)
>  at org.apache.spark.repl.Main$.createSparkSession(Main.scala:106)
>  at $line3.$read$$iw$$iw.<init>(<console>:15)
>  at $line3.$read$$iw.<init>(<console>:42)
>  at $line3.$read.<init>(<console>:44)
>  at $line3.$read$.<init>(<console>:48)
>  at $line3.$read$.<clinit>(<console>)
>  at $line3.$eval$.$print$lzycompute(<console>:7)
>  at $line3.$eval$.$print(<console>:6)
>  at $line3.$eval.$print(<console>)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498)
>  at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:745)
>  at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1021)
>  at scala.tools.nsc.interpreter.IMain.$anonfun$interpret$1(IMain.scala:574)
>  at scala.reflect.internal.util.ScalaClassLoader.asContext(ScalaClassLoader.scala:41)
>  at scala.reflect.internal.util.ScalaClassLoader.asContext$(ScalaClassLoader.scala:37)
>  at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:41)
>  at scala.tools.nsc.interpreter.IMain.loadAndRunReq$1(IMain.scala:573)
>  at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:600)
>  at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:570)
>  at scala.tools.nsc.interpreter.IMain.$anonfun$quietRun$1(IMain.scala:224)
>  at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:214)
>  at scala.tools.nsc.interpreter.IMain.quietRun(IMain.scala:224)
>  at org.apache.spark.repl.SparkILoop.$anonfun$initializeSpark$2(SparkILoop.scala:83)
>  at scala.collection.immutable.List.foreach(List.scala:392)
>  at org.apache.spark.repl.SparkILoop.$anonfun$initializeSpark$1(SparkILoop.scala:83)
>  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>  at scala.tools.nsc.interpreter.ILoop.savingReplayStack(ILoop.scala:99)
>  at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:83)
>  at org.apache.spark.repl.SparkILoop.$anonfun$process$4(SparkILoop.scala:165)
>  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>  at scala.tools.nsc.interpreter.ILoop.$anonfun$mumly$1(ILoop.scala:168)
>  at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:214)
>  at scala.tools.nsc.interpreter.ILoop.mumly(ILoop.scala:165)
>  at org.apache.spark.repl.SparkILoop.loopPostInit$1(SparkILoop.scala:153)
>  at org.apache.spark.repl.SparkILoop.$anonfun$process$10(SparkILoop.scala:221)
>  at org.apache.spark.repl.SparkILoop.withSuppressedSettings$1(SparkILoop.scala:189)
>  at org.apache.spark.repl.SparkILoop.startup$1(SparkILoop.scala:201)
>  at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:236)
>  at org.apache.spark.repl.Main$.doMain(Main.scala:78)
>  at org.apache.spark.repl.Main$.main(Main.scala:58)
>  at org.apache.spark.repl.Main.main(Main.scala)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498)
>  at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
>  at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:993)
>  at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:183)
>  at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:206)
>  at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:93)
>  at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1072)
>  at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1081)
>  at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
>  
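>
> A sketch of the shape a fix could take (assumed, not an actual patch):
> build the log file path with Hadoop's Path instead of local-file
> utilities, and resolve the FileSystem from the path's own URI so a
> fully qualified dfsDir is honored instead of being resolved against the
> default filesystem. driverLogPath and the "_driver.log" suffix are
> illustrative names:
>
>   import org.apache.hadoop.conf.Configuration
>   import org.apache.hadoop.fs.{FileSystem, Path}
>
>   // Hypothetical helper: returns the filesystem matching the dfsDir
>   // scheme and the fully qualified per-application driver log path.
>   def driverLogPath(dfsDir: String, appId: String,
>       hadoopConf: Configuration): (FileSystem, Path) = {
>     val rootDir = new Path(dfsDir)              // keeps hdfs://hacluster/...
>     val fs = rootDir.getFileSystem(hadoopConf)  // FS for that scheme/authority
>     (fs, new Path(rootDir, appId + "_driver.log"))
>   }
>
> Until this is fixed, configuring the directory without the scheme (e.g.
> spark.driver.log.dfsDir = /spark2xdriverlogs1) should avoid the
> mangling when HDFS is the default filesystem (untested).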



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
