Repository: spark
Updated Branches:
  refs/heads/master 2692bdb7d -> 8aff36e91


[SPARK-2960][DEPLOY] Support executing Spark from symlinks (reopen)

This PR is based on the work of roji to support running Spark scripts from 
symlinks. Thanks for the great work, roji. Would you mind taking a look at this 
PR? Thanks a lot.

Distributions such as HDP typically expose the Spark executables as symlinks 
on the `PATH`, but Spark's current scripts do not recursively resolve the real 
path behind a symlink, so Spark fails to execute when launched through one. 
This PR addresses the issue by finding the absolute path of the installation 
even when the script is invoked via a symlink.
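
To make the failure mode concrete, here is a hypothetical layout (the paths 
below are illustrative, not taken from this patch):

```sh
# Suppose a distro installs Spark under /opt/spark and symlinks the
# launcher onto the PATH:
#   /usr/bin/spark-shell -> /opt/spark/bin/spark-shell
# With the old scripts, $0 is /usr/bin/spark-shell, so
FWDIR="$(cd "`dirname "$0"`"/..; pwd)"   # yields /usr, not /opt/spark
# and "$FWDIR/bin/spark-submit" does not exist.
```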

Unlike the earlier PR (https://github.com/apache/spark/pull/2386), which used 
`readlink -f`, this change resolves the path manually in a loop, because the 
`-f` flag is not supported on Mac.
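
For reference, a minimal sketch of the loop-based resolution idea (an 
illustration only, not necessarily the exact code in this patch; 
`resolve_link` is a hypothetical helper name):

```sh
# Portable symlink resolution without GNU `readlink -f` (the -f flag is
# unavailable on Mac). Plain `readlink` exists on both Linux and Mac.
# No cycle detection; fine for a sketch.
resolve_link() {
  target="$1"
  while [ -L "$target" ]; do                      # keep following links
    link="$(readlink "$target")"
    case "$link" in
      /*) target="$link" ;;                       # absolute link target
      *)  target="$(dirname "$target")/$link" ;;  # relative link target
    esac
  done
  # Canonicalize the final path.
  echo "$(cd "$(dirname "$target")" && pwd)/$(basename "$target")"
}
```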

I've tested on Mac and Linux (CentOS), and it looks fine.

This PR did not originally fix the scripts under the `sbin` folder; I'm not 
sure whether those need to be fixed as well.
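
For completeness, a sketch of how a packaged installation can use the new 
behavior (the paths below are hypothetical):

```sh
# The updated scripts honor a pre-set SPARK_HOME instead of deriving it
# from $0, so a distribution can put symlinks on the PATH:
export SPARK_HOME=/usr/lib/spark                   # assumed install dir
sudo ln -s "$SPARK_HOME/bin/spark-submit" /usr/bin/spark-submit
spark-submit --version   # finds everything under ${SPARK_HOME}
```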

Please help to review; any comments are greatly appreciated.

Author: jerryshao <ss...@hortonworks.com>
Author: Shay Rojansky <r...@roji.org>

Closes #8669 from jerryshao/SPARK-2960.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/8aff36e9
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/8aff36e9
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/8aff36e9

Branch: refs/heads/master
Commit: 8aff36e91de0fee2f3f56c6d240bb203b5bb48ba
Parents: 2692bdb
Author: jerryshao <ss...@hortonworks.com>
Authored: Wed Nov 4 10:49:34 2015 +0000
Committer: Sean Owen <so...@cloudera.com>
Committed: Wed Nov 4 10:49:34 2015 +0000

----------------------------------------------------------------------
 bin/beeline                         |  8 +++++---
 bin/load-spark-env.sh               | 32 ++++++++++++++++++--------------
 bin/pyspark                         | 14 ++++++++------
 bin/run-example                     | 18 ++++++++++--------
 bin/spark-class                     | 15 ++++++++-------
 bin/spark-shell                     |  9 ++++++---
 bin/spark-sql                       |  7 +++++--
 bin/spark-submit                    |  6 ++++--
 bin/sparkR                          |  9 ++++++---
 sbin/slaves.sh                      |  9 +++++----
 sbin/spark-config.sh                | 23 +++++++----------------
 sbin/spark-daemon.sh                | 23 ++++++++++++-----------
 sbin/spark-daemons.sh               |  9 +++++----
 sbin/start-all.sh                   | 11 ++++++-----
 sbin/start-history-server.sh        | 11 ++++++-----
 sbin/start-master.sh                | 17 +++++++++--------
 sbin/start-mesos-dispatcher.sh      | 11 ++++++-----
 sbin/start-mesos-shuffle-service.sh | 11 ++++++-----
 sbin/start-shuffle-service.sh       | 11 ++++++-----
 sbin/start-slave.sh                 | 18 +++++++++---------
 sbin/start-slaves.sh                | 19 +++++++++----------
 sbin/start-thriftserver.sh          | 11 ++++++-----
 sbin/stop-all.sh                    | 14 +++++++-------
 sbin/stop-history-server.sh         |  7 ++++---
 sbin/stop-master.sh                 | 13 +++++++------
 sbin/stop-mesos-dispatcher.sh       |  9 +++++----
 sbin/stop-mesos-shuffle-service.sh  |  7 ++++---
 sbin/stop-shuffle-service.sh        |  7 ++++---
 sbin/stop-slave.sh                  | 15 ++++++++-------
 sbin/stop-slaves.sh                 | 15 ++++++++-------
 sbin/stop-thriftserver.sh           |  7 ++++---
 31 files changed, 213 insertions(+), 183 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/bin/beeline
----------------------------------------------------------------------
diff --git a/bin/beeline b/bin/beeline
index 3fcb6df..1627626 100755
--- a/bin/beeline
+++ b/bin/beeline
@@ -23,8 +23,10 @@
 # Enter posix mode for bash
 set -o posix
 
-# Figure out where Spark is installed
-FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
+# Figure out if SPARK_HOME is set
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 CLASS="org.apache.hive.beeline.BeeLine"
-exec "$FWDIR/bin/spark-class" $CLASS "$@"
+exec "${SPARK_HOME}/bin/spark-class" $CLASS "$@"

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/bin/load-spark-env.sh
----------------------------------------------------------------------
diff --git a/bin/load-spark-env.sh b/bin/load-spark-env.sh
index 95779e9..eaea964 100644
--- a/bin/load-spark-env.sh
+++ b/bin/load-spark-env.sh
@@ -20,13 +20,17 @@
 # This script loads spark-env.sh if it exists, and ensures it is only loaded once.
 # spark-env.sh is loaded from SPARK_CONF_DIR if set, or within the current directory's
 # conf/ subdirectory.
-FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
+
+# Figure out where Spark is installed
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 if [ -z "$SPARK_ENV_LOADED" ]; then
   export SPARK_ENV_LOADED=1
 
   # Returns the parent of the directory this script lives in.
-  parent_dir="$(cd "`dirname "$0"`"/..; pwd)"
+  parent_dir="${SPARK_HOME}"
 
   user_conf_dir="${SPARK_CONF_DIR:-"$parent_dir"/conf}"
 
@@ -42,18 +46,18 @@ fi
 
 if [ -z "$SPARK_SCALA_VERSION" ]; then
 
-    ASSEMBLY_DIR2="$FWDIR/assembly/target/scala-2.11"
-    ASSEMBLY_DIR1="$FWDIR/assembly/target/scala-2.10"
+  ASSEMBLY_DIR2="${SPARK_HOME}/assembly/target/scala-2.11"
+  ASSEMBLY_DIR1="${SPARK_HOME}/assembly/target/scala-2.10"
 
-    if [[ -d "$ASSEMBLY_DIR2" && -d "$ASSEMBLY_DIR1" ]]; then
-        echo -e "Presence of build for both scala versions(SCALA 2.10 and 
SCALA 2.11) detected." 1>&2
-        echo -e 'Either clean one of them or, export SPARK_SCALA_VERSION=2.11 
in spark-env.sh.' 1>&2
-        exit 1
-    fi
+  if [[ -d "$ASSEMBLY_DIR2" && -d "$ASSEMBLY_DIR1" ]]; then
+    echo -e "Presence of build for both scala versions(SCALA 2.10 and SCALA 
2.11) detected." 1>&2
+    echo -e 'Either clean one of them or, export SPARK_SCALA_VERSION=2.11 in 
spark-env.sh.' 1>&2
+    exit 1
+  fi
 
-    if [ -d "$ASSEMBLY_DIR2" ]; then
-        export SPARK_SCALA_VERSION="2.11"
-    else
-        export SPARK_SCALA_VERSION="2.10"
-    fi
+  if [ -d "$ASSEMBLY_DIR2" ]; then
+    export SPARK_SCALA_VERSION="2.11"
+  else
+    export SPARK_SCALA_VERSION="2.10"
+  fi
 fi

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/bin/pyspark
----------------------------------------------------------------------
diff --git a/bin/pyspark b/bin/pyspark
index 18012ee..5eaa17d 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -17,9 +17,11 @@
 # limitations under the License.
 #
 
-export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-source "$SPARK_HOME"/bin/load-spark-env.sh
+source "${SPARK_HOME}"/bin/load-spark-env.sh
 export _SPARK_CMD_USAGE="Usage: ./bin/pyspark [options]"
 
 # In Spark <= 1.1, setting IPYTHON=1 would cause the driver to be launched using the `ipython`
@@ -64,12 +66,12 @@ fi
 export PYSPARK_PYTHON
 
 # Add the PySpark classes to the Python path:
-export PYTHONPATH="$SPARK_HOME/python/:$PYTHONPATH"
-export PYTHONPATH="$SPARK_HOME/python/lib/py4j-0.9-src.zip:$PYTHONPATH"
+export PYTHONPATH="${SPARK_HOME}/python/:$PYTHONPATH"
+export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.9-src.zip:$PYTHONPATH"
 
 # Load the PySpark shell.py script when ./pyspark is used interactively:
 export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
-export PYTHONSTARTUP="$SPARK_HOME/python/pyspark/shell.py"
+export PYTHONSTARTUP="${SPARK_HOME}/python/pyspark/shell.py"
 
 # For pyspark tests
 if [[ -n "$SPARK_TESTING" ]]; then
@@ -82,4 +84,4 @@ fi
 
 export PYSPARK_DRIVER_PYTHON
 export PYSPARK_DRIVER_PYTHON_OPTS
-exec "$SPARK_HOME"/bin/spark-submit pyspark-shell-main --name "PySparkShell" 
"$@"
+exec "${SPARK_HOME}"/bin/spark-submit pyspark-shell-main --name "PySparkShell" 
"$@"

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/bin/run-example
----------------------------------------------------------------------
diff --git a/bin/run-example b/bin/run-example
index 798e2ca..e1b0d57 100755
--- a/bin/run-example
+++ b/bin/run-example
@@ -17,11 +17,13 @@
 # limitations under the License.
 #
 
-FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
-export SPARK_HOME="$FWDIR"
-EXAMPLES_DIR="$FWDIR"/examples
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+EXAMPLES_DIR="${SPARK_HOME}"/examples
 
-. "$FWDIR"/bin/load-spark-env.sh
+. "${SPARK_HOME}"/bin/load-spark-env.sh
 
 if [ -n "$1" ]; then
   EXAMPLE_CLASS="$1"
@@ -34,8 +36,8 @@ else
   exit 1
 fi
 
-if [ -f "$FWDIR/RELEASE" ]; then
-  JAR_PATH="${FWDIR}/lib"
+if [ -f "${SPARK_HOME}/RELEASE" ]; then
+  JAR_PATH="${SPARK_HOME}/lib"
 else
   JAR_PATH="${EXAMPLES_DIR}/target/scala-${SPARK_SCALA_VERSION}"
 fi
@@ -44,7 +46,7 @@ JAR_COUNT=0
 
 for f in "${JAR_PATH}"/spark-examples-*hadoop*.jar; do
   if [[ ! -e "$f" ]]; then
-    echo "Failed to find Spark examples assembly in $FWDIR/lib or 
$FWDIR/examples/target" 1>&2
+    echo "Failed to find Spark examples assembly in ${SPARK_HOME}/lib or 
${SPARK_HOME}/examples/target" 1>&2
     echo "You need to build Spark before running this program" 1>&2
     exit 1
   fi
@@ -67,7 +69,7 @@ if [[ ! $EXAMPLE_CLASS == org.apache.spark.examples* ]]; then
   EXAMPLE_CLASS="org.apache.spark.examples.$EXAMPLE_CLASS"
 fi
 
-exec "$FWDIR"/bin/spark-submit \
+exec "${SPARK_HOME}"/bin/spark-submit \
   --master $EXAMPLE_MASTER \
   --class $EXAMPLE_CLASS \
   "$SPARK_EXAMPLES_JAR" \

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/bin/spark-class
----------------------------------------------------------------------
diff --git a/bin/spark-class b/bin/spark-class
index 8cae6cc..87d0669 100755
--- a/bin/spark-class
+++ b/bin/spark-class
@@ -17,10 +17,11 @@
 # limitations under the License.
 #
 
-# Figure out where Spark is installed
-export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$SPARK_HOME"/bin/load-spark-env.sh
+. "${SPARK_HOME}"/bin/load-spark-env.sh
 
 # Find the java binary
 if [ -n "${JAVA_HOME}" ]; then
@@ -36,10 +37,10 @@ fi
 
 # Find assembly jar
 SPARK_ASSEMBLY_JAR=
-if [ -f "$SPARK_HOME/RELEASE" ]; then
-  ASSEMBLY_DIR="$SPARK_HOME/lib"
+if [ -f "${SPARK_HOME}/RELEASE" ]; then
+  ASSEMBLY_DIR="${SPARK_HOME}/lib"
 else
-  ASSEMBLY_DIR="$SPARK_HOME/assembly/target/scala-$SPARK_SCALA_VERSION"
+  ASSEMBLY_DIR="${SPARK_HOME}/assembly/target/scala-$SPARK_SCALA_VERSION"
 fi
 
 GREP_OPTIONS=
@@ -65,7 +66,7 @@ LAUNCH_CLASSPATH="$SPARK_ASSEMBLY_JAR"
 
 # Add the launcher build dir to the classpath if requested.
 if [ -n "$SPARK_PREPEND_CLASSES" ]; then
-  LAUNCH_CLASSPATH="$SPARK_HOME/launcher/target/scala-$SPARK_SCALA_VERSION/classes:$LAUNCH_CLASSPATH"
+  LAUNCH_CLASSPATH="${SPARK_HOME}/launcher/target/scala-$SPARK_SCALA_VERSION/classes:$LAUNCH_CLASSPATH"
 fi
 
 export _SPARK_ASSEMBLY="$SPARK_ASSEMBLY_JAR"

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/bin/spark-shell
----------------------------------------------------------------------
diff --git a/bin/spark-shell b/bin/spark-shell
index 00ab7af..6583b5b 100755
--- a/bin/spark-shell
+++ b/bin/spark-shell
@@ -28,7 +28,10 @@ esac
 # Enter posix mode for bash
 set -o posix
 
-export FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
 export _SPARK_CMD_USAGE="Usage: ./bin/spark-shell [options]"
 
 # SPARK-4161: scala does not assume use of the java classpath,
@@ -47,11 +50,11 @@ function main() {
     # (see https://github.com/sbt/sbt/issues/562).
     stty -icanon min 1 -echo > /dev/null 2>&1
     export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix"
-    "$FWDIR"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark 
shell" "$@"
+    "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.repl.Main --name 
"Spark shell" "$@"
     stty icanon echo > /dev/null 2>&1
   else
     export SPARK_SUBMIT_OPTS
-    "$FWDIR"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark 
shell" "$@"
+    "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.repl.Main --name 
"Spark shell" "$@"
   fi
 }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/bin/spark-sql
----------------------------------------------------------------------
diff --git a/bin/spark-sql b/bin/spark-sql
index 4ea7bc6..970d12c 100755
--- a/bin/spark-sql
+++ b/bin/spark-sql
@@ -17,6 +17,9 @@
 # limitations under the License.
 #
 
-export FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
 export _SPARK_CMD_USAGE="Usage: ./bin/spark-sql [options] [cli option]"
-exec "$FWDIR"/bin/spark-submit --class 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver "$@"
+exec "${SPARK_HOME}"/bin/spark-submit --class 
org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver "$@"

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/bin/spark-submit
----------------------------------------------------------------------
diff --git a/bin/spark-submit b/bin/spark-submit
index 255378b..023f9c1 100755
--- a/bin/spark-submit
+++ b/bin/spark-submit
@@ -17,9 +17,11 @@
 # limitations under the License.
 #
 
-SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 # disable randomized hash for string in Python 3.3+
 export PYTHONHASHSEED=0
 
-exec "$SPARK_HOME"/bin/spark-class org.apache.spark.deploy.SparkSubmit "$@"
+exec "${SPARK_HOME}"/bin/spark-class org.apache.spark.deploy.SparkSubmit "$@"

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/bin/sparkR
----------------------------------------------------------------------
diff --git a/bin/sparkR b/bin/sparkR
index 464c29f..2c07a82 100755
--- a/bin/sparkR
+++ b/bin/sparkR
@@ -17,7 +17,10 @@
 # limitations under the License.
 #
 
-export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
-source "$SPARK_HOME"/bin/load-spark-env.sh
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+source "${SPARK_HOME}"/bin/load-spark-env.sh
 export _SPARK_CMD_USAGE="Usage: ./bin/sparkR [options]"
-exec "$SPARK_HOME"/bin/spark-submit sparkr-shell-main "$@"
+exec "${SPARK_HOME}"/bin/spark-submit sparkr-shell-main "$@"

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/slaves.sh
----------------------------------------------------------------------
diff --git a/sbin/slaves.sh b/sbin/slaves.sh
index cdad47e..c971aa3 100755
--- a/sbin/slaves.sh
+++ b/sbin/slaves.sh
@@ -36,10 +36,11 @@ if [ $# -le 0 ]; then
   exit 1
 fi
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
 # If the slaves file is specified in the command line,
 # then it takes precedence over the definition in
@@ -65,7 +66,7 @@ then
   shift
 fi
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 if [ "$HOSTLIST" = "" ]; then
   if [ "$SPARK_SLAVES" = "" ]; then

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/spark-config.sh
----------------------------------------------------------------------
diff --git a/sbin/spark-config.sh b/sbin/spark-config.sh
index e6bf544..d8d9d00 100755
--- a/sbin/spark-config.sh
+++ b/sbin/spark-config.sh
@@ -19,21 +19,12 @@
 # should not be executable directly
 # also should not be passed any arguments, since we need original $*
 
-# resolve links - $0 may be a softlink
-this="${BASH_SOURCE:-$0}"
-common_bin="$(cd -P -- "$(dirname -- "$this")" && pwd -P)"
-script="$(basename -- "$this")"
-this="$common_bin/$script"
+# symlink and absolute path should rely on SPARK_HOME to resolve
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-# convert relative path to absolute path
-config_bin="`dirname "$this"`"
-script="`basename "$this"`"
-config_bin="`cd "$config_bin"; pwd`"
-this="$config_bin/$script"
-
-export SPARK_PREFIX="`dirname "$this"`"/..
-export SPARK_HOME="${SPARK_PREFIX}"
-export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"$SPARK_HOME/conf"}"
+export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"${SPARK_HOME}/conf"}"
 # Add the PySpark classes to the PYTHONPATH:
-export PYTHONPATH="$SPARK_HOME/python:$PYTHONPATH"
-export PYTHONPATH="$SPARK_HOME/python/lib/py4j-0.9-src.zip:$PYTHONPATH"
+export PYTHONPATH="${SPARK_HOME}/python:${PYTHONPATH}"
+export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.9-src.zip:${PYTHONPATH}"

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/spark-daemon.sh
----------------------------------------------------------------------
diff --git a/sbin/spark-daemon.sh b/sbin/spark-daemon.sh
index 0fbe795..6ab57df 100755
--- a/sbin/spark-daemon.sh
+++ b/sbin/spark-daemon.sh
@@ -37,10 +37,11 @@ if [ $# -le 1 ]; then
   exit 1
 fi
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
 # get arguments
 
@@ -86,7 +87,7 @@ spark_rotate_log ()
     fi
 }
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 if [ "$SPARK_IDENT_STRING" = "" ]; then
   export SPARK_IDENT_STRING="$USER"
@@ -97,7 +98,7 @@ export SPARK_PRINT_LAUNCH_COMMAND="1"
 
 # get log directory
 if [ "$SPARK_LOG_DIR" = "" ]; then
-  export SPARK_LOG_DIR="$SPARK_HOME/logs"
+  export SPARK_LOG_DIR="${SPARK_HOME}/logs"
 fi
 mkdir -p "$SPARK_LOG_DIR"
 touch "$SPARK_LOG_DIR"/.spark_test > /dev/null 2>&1
@@ -137,7 +138,7 @@ run_command() {
 
   if [ "$SPARK_MASTER" != "" ]; then
     echo rsync from "$SPARK_MASTER"
-    rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' "$SPARK_MASTER/" "$SPARK_HOME"
+    rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' "$SPARK_MASTER/" "${SPARK_HOME}"
   fi
 
   spark_rotate_log "$log"
@@ -145,12 +146,12 @@ run_command() {
 
   case "$mode" in
     (class)
-      nohup nice -n "$SPARK_NICENESS" "$SPARK_PREFIX"/bin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
+      nohup nice -n "$SPARK_NICENESS" "${SPARK_HOME}"/bin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
       newpid="$!"
       ;;
 
     (submit)
-      nohup nice -n "$SPARK_NICENESS" "$SPARK_PREFIX"/bin/spark-submit --class $command "$@" >> "$log" 2>&1 < /dev/null &
+      nohup nice -n "$SPARK_NICENESS" "${SPARK_HOME}"/bin/spark-submit --class $command "$@" >> "$log" 2>&1 < /dev/null &
       newpid="$!"
       ;;
 
@@ -205,13 +206,13 @@ case $option in
       else
         echo $pid file is present but $command not running
         exit 1
-      fi  
+      fi
     else
       echo $command not running.
       exit 2
-    fi  
+    fi
     ;;
-  
+
   (*)
     echo $usage
     exit 1

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/spark-daemons.sh
----------------------------------------------------------------------
diff --git a/sbin/spark-daemons.sh b/sbin/spark-daemons.sh
index 5d9f2bb..dec2f44 100755
--- a/sbin/spark-daemons.sh
+++ b/sbin/spark-daemons.sh
@@ -27,9 +27,10 @@ if [ $# -le 1 ]; then
   exit 1
 fi
 
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/spark-daemon.sh" "$@"
+exec "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; 
"${SPARK_HOME}/sbin/spark-daemon.sh" "$@"

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/start-all.sh
----------------------------------------------------------------------
diff --git a/sbin/start-all.sh b/sbin/start-all.sh
index 1baf57c..6217f9b 100755
--- a/sbin/start-all.sh
+++ b/sbin/start-all.sh
@@ -21,8 +21,9 @@
 # Starts the master on this node.
 # Starts a worker on each node specified in conf/slaves
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 TACHYON_STR=""
 
@@ -36,10 +37,10 @@ shift
 done
 
 # Load the Spark configuration
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
 # Start Master
-"$sbin"/start-master.sh $TACHYON_STR
+"${SPARK_HOME}/sbin"/start-master.sh $TACHYON_STR
 
 # Start Workers
-"$sbin"/start-slaves.sh $TACHYON_STR
+"${SPARK_HOME}/sbin"/start-slaves.sh $TACHYON_STR

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/start-history-server.sh
----------------------------------------------------------------------
diff --git a/sbin/start-history-server.sh b/sbin/start-history-server.sh
index 9034e57..6851d99 100755
--- a/sbin/start-history-server.sh
+++ b/sbin/start-history-server.sh
@@ -24,10 +24,11 @@
 # Use the SPARK_HISTORY_OPTS environment variable to set history server configuration.
 #
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
-exec "$sbin"/spark-daemon.sh start 
org.apache.spark.deploy.history.HistoryServer 1 $@
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start 
org.apache.spark.deploy.history.HistoryServer 1 $@

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/start-master.sh
----------------------------------------------------------------------
diff --git a/sbin/start-master.sh b/sbin/start-master.sh
index a7f5d57..c20e19a 100755
--- a/sbin/start-master.sh
+++ b/sbin/start-master.sh
@@ -19,8 +19,9 @@
 
 # Starts the master on the machine this script is executed on.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 ORIGINAL_ARGS="$@"
 
@@ -39,9 +40,9 @@ case $1 in
 shift
 done
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 if [ "$SPARK_MASTER_PORT" = "" ]; then
   SPARK_MASTER_PORT=7077
@@ -55,12 +56,12 @@ if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
   SPARK_MASTER_WEBUI_PORT=8080
 fi
 
-"$sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 \
+"${SPARK_HOME}/sbin"/spark-daemon.sh start 
org.apache.spark.deploy.master.Master 1 \
   --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port 
$SPARK_MASTER_WEBUI_PORT \
   $ORIGINAL_ARGS
 
 if [ "$START_TACHYON" == "true" ]; then
-  "$sbin"/../tachyon/bin/tachyon bootstrap-conf $SPARK_MASTER_IP
-  "$sbin"/../tachyon/bin/tachyon format -s
-  "$sbin"/../tachyon/bin/tachyon-start.sh master
+  "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon bootstrap-conf $SPARK_MASTER_IP
+  "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon format -s
+  "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon-start.sh master
 fi

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/start-mesos-dispatcher.sh
----------------------------------------------------------------------
diff --git a/sbin/start-mesos-dispatcher.sh b/sbin/start-mesos-dispatcher.sh
index ef1fc57..4777e16 100755
--- a/sbin/start-mesos-dispatcher.sh
+++ b/sbin/start-mesos-dispatcher.sh
@@ -21,12 +21,13 @@
 # Rest server to handle driver requests for Mesos cluster mode.
 # Only one cluster dispatcher is needed per Mesos cluster.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 if [ "$SPARK_MESOS_DISPATCHER_PORT" = "" ]; then
   SPARK_MESOS_DISPATCHER_PORT=7077
@@ -37,4 +38,4 @@ if [ "$SPARK_MESOS_DISPATCHER_HOST" = "" ]; then
 fi
 
 
-"$sbin"/spark-daemon.sh start 
org.apache.spark.deploy.mesos.MesosClusterDispatcher 1 --host 
$SPARK_MESOS_DISPATCHER_HOST --port $SPARK_MESOS_DISPATCHER_PORT "$@"
+"${SPARK_HOME}/sbin"/spark-daemon.sh start 
org.apache.spark.deploy.mesos.MesosClusterDispatcher 1 --host 
$SPARK_MESOS_DISPATCHER_HOST --port $SPARK_MESOS_DISPATCHER_PORT "$@"

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/start-mesos-shuffle-service.sh
----------------------------------------------------------------------
diff --git a/sbin/start-mesos-shuffle-service.sh b/sbin/start-mesos-shuffle-service.sh
index 6458076..1845845 100755
--- a/sbin/start-mesos-shuffle-service.sh
+++ b/sbin/start-mesos-shuffle-service.sh
@@ -26,10 +26,11 @@
 # Use the SPARK_SHUFFLE_OPTS environment variable to set shuffle service configuration.
 #
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
-exec "$sbin"/spark-daemon.sh start 
org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start 
org.apache.spark.deploy.mesos.MesosExternalShuffleService 1

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/start-shuffle-service.sh
----------------------------------------------------------------------
diff --git a/sbin/start-shuffle-service.sh b/sbin/start-shuffle-service.sh
index 4fddcf7..793e165 100755
--- a/sbin/start-shuffle-service.sh
+++ b/sbin/start-shuffle-service.sh
@@ -24,10 +24,11 @@
 # Use the SPARK_SHUFFLE_OPTS environment variable to set shuffle server configuration.
 #
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
-exec "$sbin"/spark-daemon.sh start 
org.apache.spark.deploy.ExternalShuffleService 1
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start 
org.apache.spark.deploy.ExternalShuffleService 1

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/start-slave.sh
----------------------------------------------------------------------
diff --git a/sbin/start-slave.sh b/sbin/start-slave.sh
index 4c919ff..2145564 100755
--- a/sbin/start-slave.sh
+++ b/sbin/start-slave.sh
@@ -21,14 +21,14 @@
 #
 # Environment Variables
 #
-#   SPARK_WORKER_INSTANCES  The number of worker instances to run on this 
+#   SPARK_WORKER_INSTANCES  The number of worker instances to run on this
 #                           slave.  Default is 1.
-#   SPARK_WORKER_PORT       The base port number for the first worker. If set, 
+#   SPARK_WORKER_PORT       The base port number for the first worker. If set,
 #                           subsequent workers will increment this number.  If
 #                           unset, Spark will find a valid port number, but
 #                           with no guarantee of a predictable pattern.
 #   SPARK_WORKER_WEBUI_PORT The base port for the web interface of the first
-#                           worker.  Subsequent workers will increment this 
+#                           worker.  Subsequent workers will increment this
 #                           number.  Default is 8081.
 
 usage="Usage: start-slave.sh <spark-master-URL> where <spark-master-URL> is 
like spark://localhost:7077"
@@ -39,12 +39,13 @@ if [ $# -lt 1 ]; then
   exit 1
 fi
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 # First argument should be the master; we need to store it aside because we may
 # need to insert arguments between it and the other arguments
@@ -71,7 +72,7 @@ function start_instance {
   fi
   WEBUI_PORT=$(( $SPARK_WORKER_WEBUI_PORT + $WORKER_NUM - 1 ))
 
-  "$sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker 
$WORKER_NUM \
+  "${SPARK_HOME}/sbin"/spark-daemon.sh start 
org.apache.spark.deploy.worker.Worker $WORKER_NUM \
      --webui-port "$WEBUI_PORT" $PORT_FLAG $PORT_NUM $MASTER "$@"
 }
 
@@ -82,4 +83,3 @@ else
     start_instance $(( 1 + $i )) "$@"
   done
 fi
-

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/start-slaves.sh
----------------------------------------------------------------------
diff --git a/sbin/start-slaves.sh b/sbin/start-slaves.sh
index 24d6268..51ca81e 100755
--- a/sbin/start-slaves.sh
+++ b/sbin/start-slaves.sh
@@ -19,16 +19,16 @@
 
 # Starts a slave instance on each machine specified in the conf/slaves file.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
-
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 START_TACHYON=false
 
 while (( "$#" )); do
 case $1 in
     --with-tachyon)
-      if [ ! -e "$sbin"/../tachyon/bin/tachyon ]; then
+      if [ ! -e "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon ]; then
         echo "Error: --with-tachyon specified, but tachyon not found."
         exit -1
       fi
@@ -38,9 +38,8 @@ case $1 in
 shift
 done
 
-. "$sbin/spark-config.sh"
-
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 # Find the port number for the master
 if [ "$SPARK_MASTER_PORT" = "" ]; then
@@ -52,11 +51,11 @@ if [ "$SPARK_MASTER_IP" = "" ]; then
 fi
 
 if [ "$START_TACHYON" == "true" ]; then
-  "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin"/../tachyon/bin/tachyon 
bootstrap-conf "$SPARK_MASTER_IP"
+  "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; 
"${SPARK_HOME}/sbin"/../tachyon/bin/tachyon bootstrap-conf "$SPARK_MASTER_IP"
 
   # set -t so we can call sudo
-  SPARK_SSH_OPTS="-o StrictHostKeyChecking=no -t" "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/../tachyon/bin/tachyon-start.sh" worker SudoMount \; sleep 1
+  SPARK_SSH_OPTS="-o StrictHostKeyChecking=no -t" "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/tachyon/bin/tachyon-start.sh" worker SudoMount \; sleep 1
 fi
 
 # Launch the slaves
-"$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" 
"spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT"
+"${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; 
"${SPARK_HOME}/sbin/start-slave.sh" 
"spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT"

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/start-thriftserver.sh
----------------------------------------------------------------------
diff --git a/sbin/start-thriftserver.sh b/sbin/start-thriftserver.sh
index 5b0aeb1..ad7e7c5 100755
--- a/sbin/start-thriftserver.sh
+++ b/sbin/start-thriftserver.sh
@@ -23,8 +23,9 @@
 # Enter posix mode for bash
 set -o posix
 
-# Figure out where Spark is installed
-FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 # NOTE: This exact class name is matched downstream by SparkSubmit.
 # Any changes need to be reflected there.
@@ -39,10 +40,10 @@ function usage {
   pattern+="\|======="
   pattern+="\|--help"
 
-  "$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
+  "${SPARK_HOME}"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
   echo
   echo "Thrift server options:"
-  "$FWDIR"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+  "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
 }
 
 if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
@@ -52,4 +53,4 @@ fi
 
 export SUBMIT_USAGE_FUNCTION=usage
 
-exec "$FWDIR"/sbin/spark-daemon.sh submit $CLASS 1 "$@"
+exec "${SPARK_HOME}"/sbin/spark-daemon.sh submit $CLASS 1 "$@"

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/stop-all.sh
----------------------------------------------------------------------
diff --git a/sbin/stop-all.sh b/sbin/stop-all.sh
index 1a9abe0..4e476ca 100755
--- a/sbin/stop-all.sh
+++ b/sbin/stop-all.sh
@@ -20,23 +20,23 @@
 # Stop all spark daemons.
 # Run this on the master node.
 
-
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 # Load the Spark configuration
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
 # Stop the slaves, then the master
-"$sbin"/stop-slaves.sh
-"$sbin"/stop-master.sh
+"${SPARK_HOME}/sbin"/stop-slaves.sh
+"${SPARK_HOME}/sbin"/stop-master.sh
 
 if [ "$1" == "--wait" ]
 then
   printf "Waiting for workers to shut down..."
   while true
   do
-    running=`$sbin/slaves.sh ps -ef | grep -v grep | grep deploy.worker.Worker`
+    running=`${SPARK_HOME}/sbin/slaves.sh ps -ef | grep -v grep | grep deploy.worker.Worker`
     if [ -z "$running" ]
     then
       printf "\nAll workers successfully shut down.\n"

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/stop-history-server.sh
----------------------------------------------------------------------
diff --git a/sbin/stop-history-server.sh b/sbin/stop-history-server.sh
index 6e60563..14e3af4 100755
--- a/sbin/stop-history-server.sh
+++ b/sbin/stop-history-server.sh
@@ -19,7 +19,8 @@
 
 # Stops the history server on the machine this script is executed on.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.history.HistoryServer 1
+"${SPARK_HOME}/sbin/spark-daemon.sh" stop 
org.apache.spark.deploy.history.HistoryServer 1

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/stop-master.sh
----------------------------------------------------------------------
diff --git a/sbin/stop-master.sh b/sbin/stop-master.sh
index 729702d..e57962b 100755
--- a/sbin/stop-master.sh
+++ b/sbin/stop-master.sh
@@ -19,13 +19,14 @@
 
 # Stops the master on the machine this script is executed on.
 
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop 
org.apache.spark.deploy.master.Master 1
 
-if [ -e "$sbin"/../tachyon/bin/tachyon ]; then
-  "$sbin"/../tachyon/bin/tachyon killAll tachyon.master.Master
+if [ -e "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon ]; then
+  "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon killAll tachyon.master.Master
 fi

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/stop-mesos-dispatcher.sh
----------------------------------------------------------------------
diff --git a/sbin/stop-mesos-dispatcher.sh b/sbin/stop-mesos-dispatcher.sh
index cb65d95..5c0b4e0 100755
--- a/sbin/stop-mesos-dispatcher.sh
+++ b/sbin/stop-mesos-dispatcher.sh
@@ -18,10 +18,11 @@
 #
 # Stop the Mesos Cluster dispatcher on the machine this script is executed on.
 
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-"$sbin"/spark-daemon.sh stop 
org.apache.spark.deploy.mesos.MesosClusterDispatcher 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop 
org.apache.spark.deploy.mesos.MesosClusterDispatcher 1
 

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/stop-mesos-shuffle-service.sh
----------------------------------------------------------------------
diff --git a/sbin/stop-mesos-shuffle-service.sh b/sbin/stop-mesos-shuffle-service.sh
index 0e965d5..d23cad3 100755
--- a/sbin/stop-mesos-shuffle-service.sh
+++ b/sbin/stop-mesos-shuffle-service.sh
@@ -19,7 +19,8 @@
 
 # Stops the Mesos external shuffle service on the machine this script is executed on.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-"$sbin"/spark-daemon.sh stop 
org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop 
org.apache.spark.deploy.mesos.MesosExternalShuffleService 1

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/stop-shuffle-service.sh
----------------------------------------------------------------------
diff --git a/sbin/stop-shuffle-service.sh b/sbin/stop-shuffle-service.sh
index 4cb6891..50d69cf 100755
--- a/sbin/stop-shuffle-service.sh
+++ b/sbin/stop-shuffle-service.sh
@@ -19,7 +19,8 @@
 
 # Stops the external shuffle service on the machine this script is executed on.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.ExternalShuffleService 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop 
org.apache.spark.deploy.ExternalShuffleService 1

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/stop-slave.sh
----------------------------------------------------------------------
diff --git a/sbin/stop-slave.sh b/sbin/stop-slave.sh
index 3d1da5b..685bcf5 100755
--- a/sbin/stop-slave.sh
+++ b/sbin/stop-slave.sh
@@ -21,23 +21,24 @@
 #
 # Environment variables
 #
-#   SPARK_WORKER_INSTANCES The number of worker instances that should be 
+#   SPARK_WORKER_INSTANCES The number of worker instances that should be
 #                          running on this slave.  Default is 1.
 
 # Usage: stop-slave.sh
 #   Stops all slaves on this worker machine
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
-  "$sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker 1
+  "${SPARK_HOME}/sbin"/spark-daemon.sh stop 
org.apache.spark.deploy.worker.Worker 1
 else
   for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
-    "$sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker $(( $i 
+ 1 ))
+    "${SPARK_HOME}/sbin"/spark-daemon.sh stop 
org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
   done
 fi

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/stop-slaves.sh
----------------------------------------------------------------------
diff --git a/sbin/stop-slaves.sh b/sbin/stop-slaves.sh
index 54c9bd4..6395637 100755
--- a/sbin/stop-slaves.sh
+++ b/sbin/stop-slaves.sh
@@ -17,16 +17,17 @@
 # limitations under the License.
 #
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 # do before the below calls as they exec
-if [ -e "$sbin"/../tachyon/bin/tachyon ]; then
-  "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin"/../tachyon/bin/tachyon killAll 
tachyon.worker.Worker
+if [ -e "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon ]; then
+  "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; 
"${SPARK_HOME}/sbin"/../tachyon/bin/tachyon killAll tachyon.worker.Worker
 fi
 
-"$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin"/stop-slave.sh
+"${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; 
"${SPARK_HOME}/sbin"/stop-slave.sh

http://git-wip-us.apache.org/repos/asf/spark/blob/8aff36e9/sbin/stop-thriftserver.sh
----------------------------------------------------------------------
diff --git a/sbin/stop-thriftserver.sh b/sbin/stop-thriftserver.sh
index 4031a00..cf45058 100755
--- a/sbin/stop-thriftserver.sh
+++ b/sbin/stop-thriftserver.sh
@@ -19,7 +19,8 @@
 
 # Stops the thrift server on the machine this script is executed on.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-"$sbin"/spark-daemon.sh stop 
org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop 
org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 1

