GitHub user berngp commented on a diff in the pull request:

    https://github.com/apache/spark/pull/433#discussion_r11920723
  
    --- Diff: yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala ---
    @@ -354,63 +354,85 @@ trait ClientBase extends Logging {
       }
     }
     
    -object ClientBase {
    +object ClientBase extends Logging {
       val SPARK_JAR: String = "spark.jar"
       val APP_JAR: String = "app.jar"
       val LOG4J_PROP: String = "log4j.properties"
       val LOG4J_CONF_ENV_KEY: String = "SPARK_LOG4J_CONF"
       val LOCAL_SCHEME = "local"
     
    -  // Based on code from org.apache.hadoop.mapreduce.v2.util.MRApps
    -  def populateHadoopClasspath(conf: Configuration, env: HashMap[String, String]) {
    -    val classpathEntries = Option(conf.getStrings(
    -      YarnConfiguration.YARN_APPLICATION_CLASSPATH)).getOrElse(
    -        getDefaultYarnApplicationClasspath())
    -    for (c <- classpathEntries) {
    -      YarnSparkHadoopUtil.addToEnvironment(env, Environment.CLASSPATH.name, c.trim,
    +  def populateHadoopClasspath(conf: Configuration, env: HashMap[String, String]) = {
    +    val classPathElementsToAdd = getYarnAppClasspath(conf) ++ getMRAppClasspath(conf)
    +    for (c <- classPathElementsToAdd.flatten) {
    +      YarnSparkHadoopUtil.addToEnvironment(
    +        env,
    +        Environment.CLASSPATH.name,
    +        c.trim,
             File.pathSeparator)
         }
    +    classPathElementsToAdd
    +  }
     
    -    val mrClasspathEntries = Option(conf.getStrings(
    -      "mapreduce.application.classpath")).getOrElse(
    -        getDefaultMRApplicationClasspath())
    -    if (mrClasspathEntries != null) {
    -      for (c <- mrClasspathEntries) {
    -        YarnSparkHadoopUtil.addToEnvironment(env, Environment.CLASSPATH.name, c.trim,
    -          File.pathSeparator)
    -      }
    -    }
    +  private def getYarnAppClasspath(conf: Configuration): Option[Seq[String]] =
    +    Option(conf.getStrings(YarnConfiguration.YARN_APPLICATION_CLASSPATH)) match {
    +      case Some(s) => Some(s.toSeq)
    +      case None => getDefaultYarnApplicationClasspath
       }
     
    -  def getDefaultYarnApplicationClasspath(): Array[String] = {
    -    try {
    -      val field = classOf[MRJobConfig].getField("DEFAULT_YARN_APPLICATION_CLASSPATH")
    -      field.get(null).asInstanceOf[Array[String]]
    -    } catch {
    -      case err: NoSuchFieldError => null
    -      case err: NoSuchFieldException => null
    +  private def getMRAppClasspath(conf: Configuration): Option[Seq[String]] =
    +    Option(conf.getStrings("mapreduce.application.classpath")) match {
    +      case Some(s) => Some(s.toSeq)
    +      case None => getDefaultMRApplicationClasspath
    +    }
    +
    +  def getDefaultYarnApplicationClasspath: Option[Seq[String]] = {
    +    val triedDefault = Try[Seq[String]] {
    +      val field = classOf[YarnConfiguration].getField("DEFAULT_YARN_APPLICATION_CLASSPATH")
    +      val value = field.get(null).asInstanceOf[Array[String]]
    +      value.toSeq
    +    } recoverWith {
    +      case e: NoSuchFieldException => Success(Seq.empty[String])
         }
    +
    +    triedDefault match {
    +      case f: Failure[_] =>
    +        logError("Unable to obtain the default YARN Application classpath.", f.exception)
    +      case s: Success[_] =>
    +        logDebug(s"Using the default YARN application classpath: ${s.get.mkString(",")}")
    +    }
    +
    +    triedDefault.toOption
       }
     
       /**
        * In Hadoop 0.23, the MR application classpath comes with the YARN application
        * classpath.  In Hadoop 2.0, it's an array of Strings, and in 2.2+ it's a String.
    -   * So we need to use reflection to retrieve it.
    +   * So we need to use reflection to retrieve it
        */
    -  def getDefaultMRApplicationClasspath(): Array[String] = {
    -    try {
    +  def getDefaultMRApplicationClasspath: Option[Seq[String]] = {
    +    val triedDefault = Try[Seq[String]] {
           val field = classOf[MRJobConfig].getField("DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH")
    -      if (field.getType == classOf[String]) {
    -        StringUtils.getStrings(field.get(null).asInstanceOf[String])
    +      val value = if (field.getType == classOf[String]) {
    +        StringUtils.getStrings(field.get(null).asInstanceOf[String]).toArray
           } else {
             field.get(null).asInstanceOf[Array[String]]
           }
    -    } catch {
    -      case err: NoSuchFieldError => null
    --- End diff --
    
    @sryza I removed the `NoSuchFieldError` case since in this context it
    can't happen: both the class and the field are accessed through
    reflection.
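
    To make that concrete, here is a rough, standalone sketch (not part of
    the patch; `ReflectionLookupSketch` and the `NO_SUCH_FIELD` name are
    made up). A reflective lookup of a missing field fails with the checked
    `NoSuchFieldException`, which is the only case the `Try`/`recoverWith`
    above needs to handle:

        import scala.util.{Failure, Success, Try}

        object ReflectionLookupSketch extends App {
          // "NO_SUCH_FIELD" is a hypothetical name: String has no such public
          // field, so getField throws the checked NoSuchFieldException, which
          // Try captures as a Failure and recoverWith turns into an empty default.
          val triedDefault = Try[Seq[String]] {
            val field = classOf[String].getField("NO_SUCH_FIELD")
            field.get(null).asInstanceOf[Array[String]].toSeq
          } recoverWith {
            case _: NoSuchFieldException => Success(Seq.empty[String])
          }

          triedDefault match {
            case Success(cp) => println(s"defaults: ${cp.mkString(",")}") // empty
            case Failure(e)  => println(s"unexpected failure: $e")
          }
        }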
    
    A linkage error occurs when, say, you have a dependency flagged as
    provided and your code refers directly to one of its class fields, for
    example `MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH`. Since
    your compile-time version of that dependency has the class and the
    field, the code compiles fine. The error is only thrown at runtime,
    when the provided `MRJobConfig` turns out to be missing that field,
    hence a linkage error (`NoSuchFieldError`).
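
    For completeness, a hypothetical sketch of the difference (assuming a
    Hadoop `MRJobConfig` on the compile classpath; the object and method
    names below are made up). A direct, compiled-in field reference is
    resolved by the JVM linker, so a missing field in the runtime jar
    surfaces as `NoSuchFieldError`, while the reflective lookup only fails
    with the checked `NoSuchFieldException` from `getField`:

        import org.apache.hadoop.mapreduce.MRJobConfig

        object LinkageErrorSketch {
          // Compiled-in reference: scalac resolves this field against the
          // compile-time (e.g. "provided") Hadoop jar. If the MRJobConfig class
          // loaded at runtime lacks the field, the JVM throws
          // java.lang.NoSuchFieldError right here. The field's type varies by
          // Hadoop version, so it is left inferred.
          def linkedAccess = MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH

          // Reflective reference: the field is looked up by name only at runtime,
          // so the same mismatch shows up as the checked NoSuchFieldException
          // from getField, which is what the Try/recoverWith in the diff handles.
          def reflectiveAccess: AnyRef =
            classOf[MRJobConfig].getField("DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH").get(null)
        }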

