Repository: spark
Updated Branches:
  refs/heads/branch-1.5 6dc23e626 -> cd55fbcc2


[SPARK-11023] [YARN] Avoid creating URIs from local paths directly.

The issue is that local paths on Windows are not valid URIs when they
contain drive letters or backslashes.

Instead of trying to figure out whether a path is a URI or a plain local
path, use Utils.resolveURI(), which handles that distinction for us.
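
For illustration only, a minimal sketch of the scheme-or-fallback idea
behind this change (not Spark's actual Utils.resolveURI implementation;
the object name and example paths below are hypothetical):

    import java.io.File
    import java.net.{URI, URISyntaxException}

    // Sketch: treat the string as a URI only when it parses and already
    // carries a scheme; otherwise fall back to a file: URI built from
    // the local path, which also covers Windows-style paths.
    object ResolveUriSketch {
      def toUri(path: String): URI = {
        try {
          val uri = new URI(path)
          if (uri.getScheme != null) {
            return uri
          }
        } catch {
          case _: URISyntaxException => // fall through to the file fallback
        }
        new File(path).getAbsoluteFile.toURI
      }

      def main(args: Array[String]): Unit = {
        // new URI("C:\\jars\\app.jar") would throw URISyntaxException,
        // since backslashes are illegal in URIs; the fallback yields a
        // usable file: URI instead (e.g. file:/C:/jars/app.jar on Windows).
        println(toUri("C:\\jars\\app.jar"))
        println(toUri("hdfs:///user/spark/app.jar"))
      }
    }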

Author: Marcelo Vanzin <van...@cloudera.com>

Closes #9049 from vanzin/SPARK-11023 and squashes the following commits:

77021f2 [Marcelo Vanzin] [SPARK-11023] [yarn] Avoid creating URIs from local paths directly.

(cherry picked from commit 149472a01d12828c64b0a852982d48c123984182)


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/cd55fbcc
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/cd55fbcc
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/cd55fbcc

Branch: refs/heads/branch-1.5
Commit: cd55fbcc2757264f92a1618ef4a8908c2c781bc6
Parents: 6dc23e6
Author: Marcelo Vanzin <van...@cloudera.com>
Authored: Mon Oct 12 10:21:57 2015 -0700
Committer: Marcelo Vanzin <van...@cloudera.com>
Committed: Mon Oct 12 10:42:06 2015 -0700

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/deploy/yarn/Client.scala | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/cd55fbcc/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
----------------------------------------------------------------------
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index 28228c2..f2e1c2b 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -318,7 +318,8 @@ private[spark] class Client(
         destName: Option[String] = None,
         targetDir: Option[String] = None,
         appMasterOnly: Boolean = false): (Boolean, String) = {
-      val localURI = new URI(path.trim())
+      val trimmedPath = path.trim()
+      val localURI = Utils.resolveURI(trimmedPath)
       if (localURI.getScheme != LOCAL_SCHEME) {
         if (addDistributedUri(localURI)) {
           val localPath = getQualifiedLocalPath(localURI, hadoopConf)
@@ -334,7 +335,7 @@ private[spark] class Client(
           (false, null)
         }
       } else {
-        (true, path.trim())
+        (true, trimmedPath)
       }
     }
 
@@ -555,10 +556,10 @@ private[spark] class Client(
         LOCALIZED_PYTHON_DIR)
     }
     (pySparkArchives ++ pyArchives).foreach { path =>
-      val uri = new URI(path)
+      val uri = Utils.resolveURI(path)
       if (uri.getScheme != LOCAL_SCHEME) {
         pythonPath += buildPath(YarnSparkHadoopUtil.expandEnvironment(Environment.PWD),
-          new Path(path).getName())
+          new Path(uri).getName())
       } else {
         pythonPath += uri.getPath()
       }
@@ -1175,7 +1176,7 @@ object Client extends Logging {
 
   private def getMainJarUri(mainJar: Option[String]): Option[URI] = {
     mainJar.flatMap { path =>
-      val uri = new URI(path)
+      val uri = Utils.resolveURI(path)
       if (uri.getScheme == LOCAL_SCHEME) Some(uri) else None
     }.orElse(Some(new URI(APP_JAR)))
   }

