Github user vanzin commented on a diff in the pull request:
https://github.com/apache/spark/pull/2350#discussion_r17438506
--- Diff:
yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala ---
@@ -598,46 +675,44 @@ object ClientBase extends Logging {
* @param fileName Alternate name for the file (optional).
* @param env Map holding the environment variables.
*/
- private def addFileToClasspath(path: String, fileName: String,
- env: HashMap[String, String]) : Unit = {
+ private def addFileToClasspath(
+ path: String,
+ fileName: String,
+ env: HashMap[String, String]): Unit = {
if (path != null) {
scala.util.control.Exception.ignoring(classOf[URISyntaxException]) {
- val localPath = getLocalPath(path)
- if (localPath != null) {
- addClasspathEntry(localPath, env)
+ val uri = new URI(path)
+ if (uri.getScheme == LOCAL_SCHEME) {
+ addClasspathEntry(uri.getPath, env)
return
}
}
}
if (fileName != null) {
-      addClasspathEntry(Environment.PWD.$() + Path.SEPARATOR + fileName, env);
+      addClasspathEntry(Environment.PWD.$() + Path.SEPARATOR + fileName, env)
}
}
/**
- * Returns the local path if the URI is a "local:" URI, or null otherwise.
+ * Add the given path to the classpath entry of the given environment map.
+ * If the classpath is already set, this appends the new path to the existing classpath.
 */
- private def getLocalPath(resource: String): String = {
- val uri = new URI(resource)
- if (LOCAL_SCHEME.equals(uri.getScheme())) {
- return uri.getPath()
- }
- null
- }
-
- private def addClasspathEntry(path: String, env: HashMap[String, String]) =
-   YarnSparkHadoopUtil.addToEnvironment(env, Environment.CLASSPATH.name, path,
-     File.pathSeparator)
+ private def addClasspathEntry(path: String, env: HashMap[String, String]): Unit =
+   YarnSparkHadoopUtil.addPathToEnvironment(env, Environment.CLASSPATH.name, path)
/**
* Get the list of namenodes the user may access.
*/
- private[yarn] def getNameNodesToAccess(sparkConf: SparkConf): Set[Path] = {
-   sparkConf.get("spark.yarn.access.namenodes", "").split(",").map(_.trim()).filter(!_.isEmpty)
-     .map(new Path(_)).toSet
+ def getNameNodesToAccess(sparkConf: SparkConf): Set[Path] = {
--- End diff --
Can these be left as private or `private[something]`?
(On the other hand, the object is already `private[spark]`, so maybe all
methods could have the modifier removed.)
---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]