Github user andrewor14 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/3916#discussion_r25286832
  
    --- Diff: bin/spark-submit ---
    @@ -17,58 +17,5 @@
     # limitations under the License.
     #
     
    -# NOTE: Any changes in this file must be reflected in SparkSubmitDriverBootstrapper.scala!
    -
    -export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
    -ORIG_ARGS=("$@")
    -
    -# Set COLUMNS for progress bar
    -export COLUMNS=`tput cols`
    -
    -while (($#)); do
    -  if [ "$1" = "--deploy-mode" ]; then
    -    SPARK_SUBMIT_DEPLOY_MODE=$2
    -  elif [ "$1" = "--properties-file" ]; then
    -    SPARK_SUBMIT_PROPERTIES_FILE=$2
    -  elif [ "$1" = "--driver-memory" ]; then
    -    export SPARK_SUBMIT_DRIVER_MEMORY=$2
    -  elif [ "$1" = "--driver-library-path" ]; then
    -    export SPARK_SUBMIT_LIBRARY_PATH=$2
    -  elif [ "$1" = "--driver-class-path" ]; then
    -    export SPARK_SUBMIT_CLASSPATH=$2
    -  elif [ "$1" = "--driver-java-options" ]; then
    -    export SPARK_SUBMIT_OPTS=$2
    -  elif [ "$1" = "--master" ]; then
    -    export MASTER=$2
    -  fi
    -  shift
    -done
    -
    -if [ -z "$SPARK_CONF_DIR" ]; then
    -  export SPARK_CONF_DIR="$SPARK_HOME/conf"
    -fi
    -DEFAULT_PROPERTIES_FILE="$SPARK_CONF_DIR/spark-defaults.conf"
    -if [ "$MASTER" == "yarn-cluster" ]; then
    -  SPARK_SUBMIT_DEPLOY_MODE=cluster
    -fi
    -export SPARK_SUBMIT_DEPLOY_MODE=${SPARK_SUBMIT_DEPLOY_MODE:-"client"}
    -export SPARK_SUBMIT_PROPERTIES_FILE=${SPARK_SUBMIT_PROPERTIES_FILE:-"$DEFAULT_PROPERTIES_FILE"}
    -
    -# For client mode, the driver will be launched in the same JVM that launches
    -# SparkSubmit, so we may need to read the properties file for any extra class
    -# paths, library paths, java options and memory early on. Otherwise, it will
    -# be too late by the time the driver JVM has started.
    -
    -if [[ "$SPARK_SUBMIT_DEPLOY_MODE" == "client" && -f "$SPARK_SUBMIT_PROPERTIES_FILE" ]]; then
    -  # Parse the properties file only if the special configs exist
    -  contains_special_configs=$(
    -    grep -e "spark.driver.extra*\|spark.driver.memory" "$SPARK_SUBMIT_PROPERTIES_FILE" | \
    -    grep -v "^[[:space:]]*#"
    -  )
    -  if [ -n "$contains_special_configs" ]; then
    -    export SPARK_SUBMIT_BOOTSTRAP_DRIVER=1
    -  fi
    -fi
    -
    -exec "$SPARK_HOME"/bin/spark-class org.apache.spark.deploy.SparkSubmit "${ORIG_ARGS[@]}"
    -
    +SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
    --- End diff --
    
    export?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to