Github user vanzin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/2516#discussion_r18911428
  
    --- Diff: core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala ---
    @@ -398,22 +478,117 @@ private[spark] class SparkSubmitArguments(args: Seq[String], env: Map[String, St
       }
     }
     
    -object SparkSubmitArguments {
    -  /** Load properties present in the given file. */
    -  def getPropertiesFromFile(file: File): Seq[(String, String)] = {
    -    require(file.exists(), s"Properties file $file does not exist")
    -    require(file.isFile(), s"Properties file $file is not a normal file")
    -    val inputStream = new FileInputStream(file)
    +private[spark] object SparkSubmitArguments {
    +  /**
    +   * Default property values - string literals are defined in ConfigConstants.scala
    +   */
    +  val DEFAULTS = Map(
    +    SPARK_MASTER -> "local[*]",
    +    SPARK_VERBOSE -> "false",
    +    SPARK_DEPLOY_MODE -> "client",
    +    SPARK_EXECUTOR_MEMORY -> "1g",
    +    SPARK_EXECUTOR_CORES -> "1",
    +    SPARK_EXECUTOR_INSTANCES -> "2",
    +    SPARK_DRIVER_MEMORY -> "512m",
    +    SPARK_DRIVER_CORES -> "1",
    +    SPARK_DRIVER_SUPERVISE -> "false",
    +    SPARK_YARN_QUEUE -> "default"
    +  )
    +
    +  /**
    +   * Config items that should only be set from the command line
    +   */
    +  val CMD_LINE_ONLY_KEYS = Set(
    +    SPARK_VERBOSE,
    +    SPARK_APP_CLASS,
    +    SPARK_APP_PRIMARY_RESOURCE
    +  )
    +
    +  /**
    +   * Used to support legacy environment variable mappings
    +   */
    +  val LEGACY_ENV_VARS = Map(
    +    "MASTER" -> SPARK_MASTER,
    +    "DEPLOY_MODE" -> SPARK_DEPLOY_MODE,
    +    "SPARK_DRIVER_MEMORY" -> SPARK_DRIVER_MEMORY,
    +    "SPARK_EXECUTOR_MEMORY" -> SPARK_EXECUTOR_MEMORY
    +  )
    +
    +  /**
    +   * Returns the spark-submit default config map (configName -> configValue).
    +   * Declared as an overridable var to make debugging and testing easier.
    +   */
    +  private[spark] var getHardCodedDefaultValues: () => Map[String, String] = () => {
    --- End diff --
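
    For illustration, a sketch of how the overridable var above would typically be exercised from a test (assuming the caller sits in an `org.apache.spark` package so the `private[spark]` member is visible; the string literal stands in for the `SPARK_MASTER` constant from ConfigConstants.scala):

    ```scala
    // Because getHardCodedDefaultValues is a `var` of type () => Map[String, String],
    // a test can replace it wholesale and restore it afterwards.
    val saved = SparkSubmitArguments.getHardCodedDefaultValues
    try {
      // Inject a minimal default map for the scenario under test.
      SparkSubmitArguments.getHardCodedDefaultValues = () => Map("spark.master" -> "local[2]")
      // ... exercise the code under test, which now sees only the injected defaults ...
    } finally {
      SparkSubmitArguments.getHardCodedDefaultValues = saved // restore the real defaults
    }
    ```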
    
    This feels like a long way around to achieve something simple: why not just pass the default values as an argument to `mergeSparkProperties()`?
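
    For concreteness, a minimal sketch of that alternative; the parameter list and body of `mergeSparkProperties()` here are hypothetical, and the string literals stand in for the constants defined in ConfigConstants.scala:

    ```scala
    package org.apache.spark.deploy

    private[spark] object SparkSubmitArguments {

      // Hard-coded defaults stay a plain immutable val; no overridable var needed.
      val DEFAULTS: Map[String, String] = Map(
        "spark.master" -> "local[*]",
        "spark.driver.memory" -> "512m"
      )

      /**
       * Merges properties from the various sources, lowest precedence first.
       * Tests inject alternative defaults by passing a different `defaults` map.
       */
      def mergeSparkProperties(
          fileProps: Map[String, String],
          cmdLineProps: Map[String, String],
          defaults: Map[String, String] = DEFAULTS): Map[String, String] = {
        // Later operands of ++ win: defaults < properties file < command line.
        defaults ++ fileProps ++ cmdLineProps
      }
    }
    ```

    Defaulting the parameter keeps production call sites unchanged while still giving tests an injection point without any mutable state.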

