Github user tgravescs commented on a diff in the pull request:

    https://github.com/apache/spark/pull/303#discussion_r11345451
  
    --- Diff: 
yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala ---
    @@ -426,29 +410,86 @@ object ClientBase {
         }
       }
     
    -  def populateClasspath(conf: Configuration, sparkConf: SparkConf, 
addLog4j: Boolean, env: HashMap[String, String]) {
    -    Apps.addToEnvironment(env, Environment.CLASSPATH.name, 
Environment.PWD.$())
    -    // If log4j present, ensure ours overrides all others
    -    if (addLog4j) {
    -      Apps.addToEnvironment(env, Environment.CLASSPATH.name, 
Environment.PWD.$() +
    -        Path.SEPARATOR + LOG4J_PROP)
    +  def populateClasspath(args: ClientArguments, conf: Configuration, 
sparkConf: SparkConf,
    +      log4jConf: String, env: HashMap[String, String]) {
    +    if (log4jConf != null) {
    +      // If a custom log4j config file is provided, add its parent 
directory to the
    +      // classpath. Note that for the "local:" case, this only works if 
the custom config's
    +      // file name is "log4j.properties".
    +      val localPath = getLocalPath(log4jConf)
    +      if (localPath != null) {
    +        val parentPath = new File(localPath).getParent()
    +        Apps.addToEnvironment(env, Environment.CLASSPATH.name, parentPath)
    +      } else {
    +        Apps.addToEnvironment(env, Environment.CLASSPATH.name,
    +          Environment.PWD.$() + Path.SEPARATOR)
    +      }
         }
         // Normally the users app.jar is last in case conflicts with spark jars
         val userClasspathFirst = 
sparkConf.get("spark.yarn.user.classpath.first", "false")
           .toBoolean
         if (userClasspathFirst) {
    -      Apps.addToEnvironment(env, Environment.CLASSPATH.name, 
Environment.PWD.$() +
    -        Path.SEPARATOR + APP_JAR)
    +      addUserClasspath(args, env)
         }
    -    Apps.addToEnvironment(env, Environment.CLASSPATH.name, 
Environment.PWD.$() +
    -      Path.SEPARATOR + SPARK_JAR)
    +    addClasspathEntry(System.getenv("SPARK_JAR"), SPARK_JAR, env);
         ClientBase.populateHadoopClasspath(conf, env)
    -
         if (!userClasspathFirst) {
    -      Apps.addToEnvironment(env, Environment.CLASSPATH.name, 
Environment.PWD.$() +
    -        Path.SEPARATOR + APP_JAR)
    +      addUserClasspath(args, env)
         }
    -    Apps.addToEnvironment(env, Environment.CLASSPATH.name, 
Environment.PWD.$() +
    -      Path.SEPARATOR + "*")
    +    Apps.addToEnvironment(env, Environment.CLASSPATH.name,
    +      Environment.PWD.$() + Path.SEPARATOR + "*")
       }
    +
    +  private def addUserClasspath(args: ClientArguments, env: Map[String, 
String]) = {
    +    if (args != null) {
    +      addClasspathEntry(args.userJar, APP_JAR, env)
    +    }
    +
    +    if (args != null && args.addJars != null) {
    --- End diff --
    
    why was this added? addJars puts the jars in the top-level directory, and 
the $PWD/* entry picks them up.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

Reply via email to