07ARB commented on a change in pull request #26773:
[SPARK-30126][CORE]sparkContext.addFile and sparkContext.addJar fails when file
path contains spaces
URL: https://github.com/apache/spark/pull/26773#discussion_r356367431
##########
File path: core/src/main/scala/org/apache/spark/SparkContext.scala
##########
@@ -1870,21 +1870,21 @@ class SparkContext(config: SparkConf) extends Logging {
}
}
- if (path == null) {
- logWarning("null specified as parameter to addJar")
+ if (path == null || path.isEmpty) {
+ logWarning("null or empty path specified as parameter to addJar")
} else {
val key = if (path.contains("\\")) {
// For local paths with backslashes on Windows, URI throws an exception
addLocalJarFile(new File(path))
} else {
- val uri = new URI(path)
+ val uri = new Path(path).toUri
        // SPARK-17650: Make sure this is a valid URL before adding it to the list of dependencies
Utils.validateURL(uri)
uri.getScheme match {
// A JAR file which exists only on the driver node
case null =>
// SPARK-22585 path without schema is not url encoded
- addLocalJarFile(new File(uri.getRawPath))
+ addLocalJarFile(new File(uri.getPath))
Review comment:
   I ran some of the previously failing suites locally, e.g. CachedTableSuite and
   HiveMetastoreCatalogSuite, and all test cases passed.
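
   For anyone skimming the diff, here is a minimal standalone sketch of the behaviour
   the change relies on (my own illustration, not code from the PR); it assumes
   hadoop-common is on the classpath and uses a hypothetical jar path containing a space:

   import java.net.URI
   import org.apache.hadoop.fs.Path

   object SpacePathSketch {
     def main(args: Array[String]): Unit = {
       val raw = "/tmp/my libs/example.jar"  // hypothetical local path with a space

       // Old code path: new URI(path) rejects an unencoded space outright.
       try {
         new URI(raw)
       } catch {
         case e: java.net.URISyntaxException =>
           println(s"new URI(path) failed: ${e.getMessage}")
       }

       // New code path: Hadoop's Path percent-encodes the path internally, so
       // toUri succeeds and the scheme stays null for a plain local path.
       val uri = new Path(raw).toUri
       println(uri.getRawPath)  // /tmp/my%20libs/example.jar (encoded; would break new File(...))
       println(uri.getPath)     // /tmp/my libs/example.jar   (decoded; safe for new File(...))
     }
   }

   The last two lines also show why the diff switches from uri.getRawPath to uri.getPath
   when constructing the File for a schemeless local path.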