Github user andrewor14 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/42#discussion_r10504353
  
    --- Diff: core/src/main/scala/org/apache/spark/SparkEnv.scala ---
    @@ -229,10 +243,60 @@ object SparkEnv extends Logging {
           broadcastManager,
           blockManager,
           connectionManager,
    +      securityManager,
           httpFileServer,
           sparkFilesDir,
           metricsSystem,
    -      conf,
    -      securityManager)
    +      conf)
    +  }
    +
    +  /**
    +   * Return a map representation of jvm information, Spark properties, system properties, and
    +   * class paths. Map keys define the category, and map values represent the corresponding
    +   * attributes as a sequence of KV pairs. This is used mainly for SparkListenerEnvironmentUpdate.
    +   */
    +  private[spark]
    +  def environmentDetails(
    +      conf: SparkConf,
    +      schedulingMode: String,
    +      addedJars: Seq[String],
    +      addedFiles: Seq[String]): Map[String, Seq[(String, String)]] = {
    +
    +    val jvmInformation = Seq(
    +      ("Java Version", "%s (%s)".format(Properties.javaVersion, Properties.javaVendor)),
    +      ("Java Home", Properties.javaHome),
    +      ("Scala Version", Properties.versionString),
    +      ("Scala Home", Properties.scalaHome)
    +    ).sorted
    +
    +    // Spark properties, including scheduling mode whether or not it is configured
    +    var additionalFields = Seq[(String, String)]()
    +    conf.getOption("spark.scheduler.mode").getOrElse {
    +      additionalFields ++= Seq(("spark.scheduler.mode", schedulingMode))
    +    }
    +    val sparkProperties = conf.getAll.sorted ++ additionalFields
    +
    +    // System properties that are not java classpaths
    +    val systemProperties = System.getProperties.iterator.toSeq
    +    val classPathProperty = systemProperties.find { case (k, v) =>
    +      k == "java.class.path"
    +    }.getOrElse(("", ""))
    +    val otherProperties = systemProperties.filter { case (k, v) =>
    +      k != "java.class.path" && !k.startsWith("spark.")
    +    }.sorted
    +
    +    // Class paths including all added jars and files
    +    val classPathEntries = classPathProperty._2
    +      .split(conf.get("path.separator", ":"))
    +      .filterNot(e => e.isEmpty)
    +      .map(e => (e, "System Classpath"))
    +    val addedJarsAndFiles = (addedJars ++ addedFiles).map((_, "Added By User"))
    +    val classPaths = (addedJarsAndFiles ++ classPathEntries).sorted
    +
    +    Map[String, Seq[(String, String)]](
    +      "JVM Information" -> jvmInformation,
    +      "Spark Properties" -> sparkProperties,
    +      "System Properties" -> otherProperties,
    +      "Classpath Entries" -> classPaths)
    --- End diff --
    
    This is taken largely from EnvironmentUI.scala


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---

Reply via email to