Repository: spark Updated Branches: refs/heads/master 8ff474f6e -> ab6f60c4d
[SPARK-22585][CORE] Path in addJar is not url encoded ## What changes were proposed in this pull request? This updates the behavior of the `addJar` method of the `SparkContext` class. If a path without any scheme is passed as input, it is used literally without URL encoding/decoding it. ## How was this patch tested? A unit test is added for this. Author: Jakub Dubovsky <jakub.dubov...@seznam.cz> Closes #19834 from james64/SPARK-22585-encode-add-jar. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ab6f60c4 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ab6f60c4 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ab6f60c4 Branch: refs/heads/master Commit: ab6f60c4d6417cbb0240216a6b492aadcca3043e Parents: 8ff474f Author: Jakub Dubovsky <jakub.dubov...@seznam.cz> Authored: Thu Nov 30 10:24:30 2017 +0900 Committer: hyukjinkwon <gurwls...@gmail.com> Committed: Thu Nov 30 10:24:30 2017 +0900 ---------------------------------------------------------------------- core/src/main/scala/org/apache/spark/SparkContext.scala | 6 +++++- .../test/scala/org/apache/spark/SparkContextSuite.scala | 11 +++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/spark/blob/ab6f60c4/core/src/main/scala/org/apache/spark/SparkContext.scala ---------------------------------------------------------------------- diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala index 984dd0a..c174939 100644 --- a/core/src/main/scala/org/apache/spark/SparkContext.scala +++ b/core/src/main/scala/org/apache/spark/SparkContext.scala @@ -1837,7 +1837,11 @@ class SparkContext(config: SparkConf) extends Logging { Utils.validateURL(uri) uri.getScheme match { // A JAR file which exists only on the driver node - case null | "file" => addJarFile(new File(uri.getPath)) 
+ case null => + // SPARK-22585 path without scheme is not url encoded + addJarFile(new File(uri.getRawPath)) + // A JAR file which exists only on the driver node + case "file" => addJarFile(new File(uri.getPath)) // A JAR file which exists locally on every worker node case "local" => "file:" + uri.getPath case _ => path http://git-wip-us.apache.org/repos/asf/spark/blob/ab6f60c4/core/src/test/scala/org/apache/spark/SparkContextSuite.scala ---------------------------------------------------------------------- diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala index 0ed5f26..2bde875 100644 --- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala +++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala @@ -309,6 +309,17 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu assert(sc.listJars().head.contains(tmpJar.getName)) } + test("SPARK-22585 addJar argument without scheme is interpreted literally without url decoding") { + val tmpDir = new File(Utils.createTempDir(), "host%3A443") + tmpDir.mkdirs() + val tmpJar = File.createTempFile("t%2F", ".jar", tmpDir) + + sc = new SparkContext("local", "test") + + sc.addJar(tmpJar.getAbsolutePath) + assert(sc.listJars().size === 1) + } + test("Cancelling job group should not cause SparkContext to shutdown (SPARK-6414)") { try { sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local")) --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org