Author: ddas
Date: Tue Feb 10 11:12:50 2009
New Revision: 742937
URL: http://svn.apache.org/viewvc?rev=742937&view=rev
Log:
HADOOP-4868. Splits the hadoop script into three parts - bin/hadoop, bin/mapred
and bin/hdfs. Contributed by Sharad Agarwal.
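For illustration, the split moves daemon and admin commands to per-project entry
points while bin/hadoop keeps the core commands; a sketch of equivalent
invocations (paths assume an unpacked trunk checkout):

    # before: everything went through bin/hadoop
    bin/hadoop namenode -format
    bin/hadoop jobtracker

    # after: hdfs and mapred commands move to their own scripts
    bin/hdfs namenode -format
    bin/mapred jobtracker

    # core commands (fs, jar, distcp, archive, daemonlog) stay on bin/hadoop
    bin/hadoop fs -ls /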
Added:
hadoop/core/trunk/bin/hdfs (with props)
hadoop/core/trunk/bin/hdfs-config.sh
hadoop/core/trunk/bin/mapred (with props)
hadoop/core/trunk/bin/mapred-config.sh
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/bin/hadoop
hadoop/core/trunk/bin/hadoop-config.sh
hadoop/core/trunk/bin/hadoop-daemon.sh
hadoop/core/trunk/bin/start-all.sh
hadoop/core/trunk/bin/start-balancer.sh
hadoop/core/trunk/bin/start-dfs.sh
hadoop/core/trunk/bin/start-mapred.sh
hadoop/core/trunk/bin/stop-all.sh
hadoop/core/trunk/bin/stop-balancer.sh
hadoop/core/trunk/bin/stop-dfs.sh
hadoop/core/trunk/bin/stop-mapred.sh
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=742937&r1=742936&r2=742937&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Tue Feb 10 11:12:50 2009
@@ -92,6 +92,9 @@
tools, and example jars. Let findbugs depend on this rather than the 'tar'
target. (Giridharan Kesavan via cdouglas)
+    HADOOP-4868. Splits the hadoop script into three parts - bin/hadoop, bin/mapred and
+    bin/hdfs. (Sharad Agarwal via ddas)
+
OPTIMIZATIONS
BUG FIXES
Modified: hadoop/core/trunk/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/hadoop?rev=742937&r1=742936&r2=742937&view=diff
==============================================================================
--- hadoop/core/trunk/bin/hadoop (original)
+++ hadoop/core/trunk/bin/hadoop Tue Feb 10 11:12:50 2009
@@ -15,60 +15,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-
-# The Hadoop command script
-#
-# Environment Variables
-#
-# JAVA_HOME The java implementation to use. Overrides JAVA_HOME.
-#
-# HADOOP_CLASSPATH Extra Java CLASSPATH entries.
-#
-# HADOOP_HEAPSIZE The maximum amount of heap to use, in MB.
-# Default is 1000.
-#
-# HADOOP_OPTS Extra Java runtime options.
-#
-# HADOOP_NAMENODE_OPTS These options are added to HADOOP_OPTS
-# HADOOP_CLIENT_OPTS when the respective command is run.
-# HADOOP_{COMMAND}_OPTS etc HADOOP_JT_OPTS applies to JobTracker
-# for e.g. HADOOP_CLIENT_OPTS applies to
-# more than one command (fs, dfs, fsck,
-# dfsadmin etc)
-#
-# HADOOP_CONF_DIR Alternate conf dir. Default is ${HADOOP_HOME}/conf.
-#
-# HADOOP_ROOT_LOGGER The root appender. Default is INFO,console
-#
+# This script runs the hadoop core commands.
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
-
+
. "$bin"/hadoop-config.sh
-cygwin=false
-case "`uname`" in
-CYGWIN*) cygwin=true;;
-esac
-
-# if no args specified, show usage
-if [ $# = 0 ]; then
+function print_usage(){
echo "Usage: hadoop [--config confdir] COMMAND"
- echo "where COMMAND is one of:"
- echo " namenode -format format the DFS filesystem"
- echo " secondarynamenode run the DFS secondary namenode"
- echo " namenode run the DFS namenode"
- echo " datanode run a DFS datanode"
- echo " dfsadmin run a DFS admin client"
- echo " mradmin run a Map-Reduce admin client"
- echo " fsck run a DFS filesystem checking utility"
+ echo " where COMMAND is one of:"
echo " fs run a generic filesystem user client"
- echo " balancer run a cluster balancing utility"
- echo " jobtracker run the MapReduce job Tracker node"
- echo " pipes run a Pipes job"
- echo " tasktracker run a MapReduce task Tracker node"
- echo " job manipulate MapReduce jobs"
- echo " queue get information regarding JobQueues"
echo " version print the version"
echo " jar <jar> run a jar file"
echo " distcp <srcurl> <desturl> copy file or directories recursively"
@@ -76,214 +33,72 @@
echo " daemonlog get/set the log level for each daemon"
echo " or"
echo " CLASSNAME run the class named CLASSNAME"
+ echo ""
echo "Most commands print help when invoked w/o parameters."
- exit 1
-fi
-
-# get arguments
-COMMAND=$1
-shift
-
-if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
- . "${HADOOP_CONF_DIR}/hadoop-env.sh"
-fi
-
-# some Java parameters
-if [ "$JAVA_HOME" != "" ]; then
- #echo "run java in $JAVA_HOME"
- JAVA_HOME=$JAVA_HOME
-fi
-
-if [ "$JAVA_HOME" = "" ]; then
- echo "Error: JAVA_HOME is not set."
- exit 1
-fi
-
-JAVA=$JAVA_HOME/bin/java
-JAVA_HEAP_MAX=-Xmx1000m
-
-# check envvars which might override default args
-if [ "$HADOOP_HEAPSIZE" != "" ]; then
- #echo "run with heapsize $HADOOP_HEAPSIZE"
- JAVA_HEAP_MAX="-Xmx""$HADOOP_HEAPSIZE""m"
- #echo $JAVA_HEAP_MAX
-fi
-
-# CLASSPATH initially contains $HADOOP_CONF_DIR
-CLASSPATH="${HADOOP_CONF_DIR}"
-CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
-
-# for developers, add Hadoop classes to CLASSPATH
-if [ -d "$HADOOP_HOME/build/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
-fi
-if [ -d "$HADOOP_HOME/build/webapps" ]; then
- CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
-fi
-if [ -d "$HADOOP_HOME/build/test/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
-fi
-if [ -d "$HADOOP_HOME/build/tools" ]; then
- CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/tools
-fi
-
-# so that filenames w/ spaces are handled correctly in loops below
-IFS=
-
-# for releases, add core hadoop jar & webapps to CLASSPATH
-if [ -d "$HADOOP_HOME/webapps" ]; then
- CLASSPATH=${CLASSPATH}:$HADOOP_HOME
-fi
-for f in $HADOOP_HOME/hadoop-*-core.jar; do
- CLASSPATH=${CLASSPATH}:$f;
-done
-
-# add libs to CLASSPATH
-for f in $HADOOP_HOME/lib/*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
-done
-
-if [ -d "$HADOOP_HOME/build/ivy/lib/Hadoop/common" ]; then
-for f in $HADOOP_HOME/build/ivy/lib/Hadoop/common/*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
-done
-fi
-
-for f in $HADOOP_HOME/lib/jsp-2.1/*.jar; do
- CLASSPATH=${CLASSPATH}:$f;
-done
-
-for f in $HADOOP_HOME/hadoop-*-tools.jar; do
- TOOL_PATH=${TOOL_PATH}:$f;
-done
-for f in $HADOOP_HOME/build/hadoop-*-tools.jar; do
- TOOL_PATH=${TOOL_PATH}:$f;
-done
-
-# add user-specified CLASSPATH last
-if [ "$HADOOP_CLASSPATH" != "" ]; then
- CLASSPATH=${CLASSPATH}:${HADOOP_CLASSPATH}
-fi
-
-# default log directory & file
-if [ "$HADOOP_LOG_DIR" = "" ]; then
- HADOOP_LOG_DIR="$HADOOP_HOME/logs"
-fi
-if [ "$HADOOP_LOGFILE" = "" ]; then
- HADOOP_LOGFILE='hadoop.log'
-fi
-
-# default policy file for service-level authorization
-if [ "$HADOOP_POLICYFILE" = "" ]; then
- HADOOP_POLICYFILE="hadoop-policy.xml"
-fi
-
-# restore ordinary behaviour
-unset IFS
+}
-# figure out which class to run
-if [ "$COMMAND" = "namenode" ] ; then
- CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_NAMENODE_OPTS"
-elif [ "$COMMAND" = "secondarynamenode" ] ; then
- CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS"
-elif [ "$COMMAND" = "datanode" ] ; then
- CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_DATANODE_OPTS"
-elif [ "$COMMAND" = "fs" ] ; then
- CLASS=org.apache.hadoop.fs.FsShell
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "dfs" ] ; then
- CLASS=org.apache.hadoop.fs.FsShell
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "dfsadmin" ] ; then
- CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "mradmin" ] ; then
- CLASS=org.apache.hadoop.mapred.tools.MRAdmin
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "fsck" ] ; then
- CLASS=org.apache.hadoop.hdfs.tools.DFSck
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "balancer" ] ; then
- CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
-elif [ "$COMMAND" = "jobtracker" ] ; then
- CLASS=org.apache.hadoop.mapred.JobTracker
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOBTRACKER_OPTS"
-elif [ "$COMMAND" = "tasktracker" ] ; then
- CLASS=org.apache.hadoop.mapred.TaskTracker
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_TASKTRACKER_OPTS"
-elif [ "$COMMAND" = "job" ] ; then
- CLASS=org.apache.hadoop.mapred.JobClient
-elif [ "$COMMAND" = "queue" ] ; then
- CLASS=org.apache.hadoop.mapred.JobQueueClient
-elif [ "$COMMAND" = "pipes" ] ; then
- CLASS=org.apache.hadoop.mapred.pipes.Submitter
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "version" ] ; then
- CLASS=org.apache.hadoop.util.VersionInfo
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "jar" ] ; then
- CLASS=org.apache.hadoop.util.RunJar
-elif [ "$COMMAND" = "distcp" ] ; then
- CLASS=org.apache.hadoop.tools.DistCp
- CLASSPATH=${CLASSPATH}:${TOOL_PATH}
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "daemonlog" ] ; then
- CLASS=org.apache.hadoop.log.LogLevel
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "archive" ] ; then
- CLASS=org.apache.hadoop.tools.HadoopArchives
- CLASSPATH=${CLASSPATH}:${TOOL_PATH}
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "sampler" ] ; then
- CLASS=org.apache.hadoop.mapred.lib.InputSampler
- HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-else
- CLASS=$COMMAND
+if [ $# = 0 ]; then
+ print_usage
+ exit
fi
-# cygwin path translation
-if $cygwin; then
- CLASSPATH=`cygpath -p -w "$CLASSPATH"`
- HADOOP_HOME=`cygpath -w "$HADOOP_HOME"`
- HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
- TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
-fi
-# setup 'java.library.path' for native-hadoop code if necessary
-JAVA_LIBRARY_PATH=''
-if [ -d "${HADOOP_HOME}/build/native" -o -d "${HADOOP_HOME}/lib/native" ]; then
-  JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
-
- if [ -d "$HADOOP_HOME/build/native" ]; then
- JAVA_LIBRARY_PATH=${HADOOP_HOME}/build/native/${JAVA_PLATFORM}/lib
- fi
-
- if [ -d "${HADOOP_HOME}/lib/native" ]; then
- if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
-      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
+COMMAND=$1
+case $COMMAND in
+ #hdfs commands
+ namenode|secondarynamenode|datanode|dfs|dfsadmin|fsck|balancer)
+ echo "DEPRECATED: Use of this script to execute hdfs command is
deprecated."
+ echo "Instead use the hdfs command for it."
+ echo ""
+ #try to locate hdfs and if present, delegate to it.
+ if [ -f "${HADOOP_HDFS_HOME}"/bin/hdfs ]; then
+ "${HADOOP_HDFS_HOME}"/bin/hdfs $*
else
- JAVA_LIBRARY_PATH=${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
+ echo "HDFS not found."
+ exit
fi
- fi
-fi
+ ;;
-# cygwin path translation
-if $cygwin; then
- JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
-fi
+ #mapred commands
+ mradmin|jobtracker|tasktracker|pipes|job|queue)
+ echo "DEPRECATED: Use of this script to execute mapred command is
deprecated."
+ echo "Instead use the mapred command for it."
+ echo ""
+ #try to locate mapred and if present, delegate to it.
+ if [ -f "${HADOOP_MAPRED_HOME}"/bin/mapred ]; then
+ "${HADOOP_MAPRED_HOME}"/bin/mapred $*
+ else
+ echo "MAPRED not found."
+ exit
+ fi
+ ;;
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_HOME"
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
-HADOOP_OPTS="$HADOOP_OPTS
-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
-if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
- HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
-fi
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.policy.file=$HADOOP_POLICYFILE"
+ #core commands
+ *)
+ # the core commands
+ if [ "$COMMAND" = "fs" ] ; then
+ CLASS=org.apache.hadoop.fs.FsShell
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+ elif [ "$COMMAND" = "version" ] ; then
+ CLASS=org.apache.hadoop.util.VersionInfo
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+ elif [ "$COMMAND" = "jar" ] ; then
+ CLASS=org.apache.hadoop.util.RunJar
+ elif [ "$COMMAND" = "distcp" ] ; then
+ CLASS=org.apache.hadoop.tools.DistCp
+ CLASSPATH=${CLASSPATH}:${TOOL_PATH}
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+ elif [ "$COMMAND" = "daemonlog" ] ; then
+ CLASS=org.apache.hadoop.log.LogLevel
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+ elif [ "$COMMAND" = "archive" ] ; then
+ CLASS=org.apache.hadoop.tools.HadoopArchives
+ CLASSPATH=${CLASSPATH}:${TOOL_PATH}
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+ else
+ CLASS=$COMMAND
+ fi
+ shift
+ exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS
"$@"
+ ;;
-# run it
-exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
+esac
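The case statement above keeps old command names working through bin/hadoop; a
sketch of the resulting behaviour, assuming HADOOP_HDFS_HOME and
HADOOP_MAPRED_HOME resolve as set up in hadoop-config.sh below:

    $ bin/hadoop fsck /
    DEPRECATED: Use of this script to execute hdfs command is deprecated.
    Instead use the hdfs command for it.
    # ...then delegates to "${HADOOP_HDFS_HOME}"/bin/hdfs fsck /

    $ bin/hadoop job -list    # delegates to "${HADOOP_MAPRED_HOME}"/bin/mapred
    $ bin/hadoop version      # core command, handled in this script itself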
Modified: hadoop/core/trunk/bin/hadoop-config.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/hadoop-config.sh?rev=742937&r1=742936&r2=742937&view=diff
==============================================================================
--- hadoop/core/trunk/bin/hadoop-config.sh (original)
+++ hadoop/core/trunk/bin/hadoop-config.sh Tue Feb 10 11:12:50 2009
@@ -37,7 +37,11 @@
this="$bin/$script"
# the root of the Hadoop installation
+#TODO: change the env variable when dir structure is changed
export HADOOP_HOME=`dirname "$this"`/..
+export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-$HADOOP_HOME}"
+#export HADOOP_HOME=`dirname "$this"`/../..
+#export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-`dirname "$this"`/..}"
#check to see if the conf dir is given as an optional argument
if [ $# -gt 1 ]
@@ -66,3 +70,180 @@
export HADOOP_SLAVES="${HADOOP_CONF_DIR}/$slavesfile"
fi
fi
+
+cygwin=false
+case "`uname`" in
+CYGWIN*) cygwin=true;;
+esac
+
+if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
+ . "${HADOOP_CONF_DIR}/hadoop-env.sh"
+fi
+
+# some Java parameters
+if [ "$JAVA_HOME" != "" ]; then
+ #echo "run java in $JAVA_HOME"
+ JAVA_HOME=$JAVA_HOME
+fi
+
+if [ "$JAVA_HOME" = "" ]; then
+ echo "Error: JAVA_HOME is not set."
+ exit 1
+fi
+
+JAVA=$JAVA_HOME/bin/java
+JAVA_HEAP_MAX=-Xmx1000m
+
+# check envvars which might override default args
+if [ "$HADOOP_HEAPSIZE" != "" ]; then
+ #echo "run with heapsize $HADOOP_HEAPSIZE"
+ JAVA_HEAP_MAX="-Xmx""$HADOOP_HEAPSIZE""m"
+ #echo $JAVA_HEAP_MAX
+fi
+
+# CLASSPATH initially contains $HADOOP_CONF_DIR
+CLASSPATH="${HADOOP_CONF_DIR}"
+CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
+
+# for developers, add Hadoop classes to CLASSPATH
+if [ -d "$HADOOP_CORE_HOME/build/classes" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME/build/classes
+fi
+if [ -d "$HADOOP_CORE_HOME/build/webapps" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME/build
+fi
+if [ -d "$HADOOP_CORE_HOME/build/test/classes" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME/build/test/classes
+fi
+if [ -d "$HADOOP_CORE_HOME/build/tools" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME/build/tools
+fi
+
+# so that filenames w/ spaces are handled correctly in loops below
+IFS=
+
+# for releases, add core hadoop jar & webapps to CLASSPATH
+if [ -d "$HADOOP_CORE_HOME/webapps" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME
+fi
+for f in $HADOOP_CORE_HOME/hadoop-*-core.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+done
+
+# add libs to CLASSPATH
+for f in $HADOOP_CORE_HOME/lib/*.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+done
+
+if [ -d "$HADOOP_CORE_HOME/build/ivy/lib/Hadoop/common" ]; then
+for f in $HADOOP_CORE_HOME/build/ivy/lib/Hadoop/common/*.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+done
+fi
+
+for f in $HADOOP_CORE_HOME/lib/jsp-2.1/*.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+done
+
+for f in $HADOOP_CORE_HOME/hadoop-*-tools.jar; do
+ TOOL_PATH=${TOOL_PATH}:$f;
+done
+for f in $HADOOP_CORE_HOME/build/hadoop-*-tools.jar; do
+ TOOL_PATH=${TOOL_PATH}:$f;
+done
+
+# add user-specified CLASSPATH last
+if [ "$HADOOP_CLASSPATH" != "" ]; then
+ CLASSPATH=${CLASSPATH}:${HADOOP_CLASSPATH}
+fi
+
+# default log directory & file
+if [ "$HADOOP_LOG_DIR" = "" ]; then
+ HADOOP_LOG_DIR="$HADOOP_HOME/logs"
+fi
+if [ "$HADOOP_LOGFILE" = "" ]; then
+ HADOOP_LOGFILE='hadoop.log'
+fi
+
+# default policy file for service-level authorization
+if [ "$HADOOP_POLICYFILE" = "" ]; then
+ HADOOP_POLICYFILE="hadoop-policy.xml"
+fi
+
+# restore ordinary behaviour
+unset IFS
+
+# cygwin path translation
+if $cygwin; then
+ CLASSPATH=`cygpath -p -w "$CLASSPATH"`
+ HADOOP_CORE_HOME=`cygpath -w "$HADOOP_CORE_HOME"`
+ HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
+ TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
+fi
+# setup 'java.library.path' for native-hadoop code if necessary
+JAVA_LIBRARY_PATH=''
+if [ -d "${HADOOP_CORE_HOME}/build/native" -o -d
"${HADOOP_CORE_HOME}/lib/native" ]; then
+  JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
+
+ if [ -d "$HADOOP_CORE_HOME/build/native" ]; then
+ JAVA_LIBRARY_PATH=${HADOOP_CORE_HOME}/build/native/${JAVA_PLATFORM}/lib
+ fi
+
+ if [ -d "${HADOOP_CORE_HOME}/lib/native" ]; then
+ if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
+      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_CORE_HOME}/lib/native/${JAVA_PLATFORM}
+ else
+ JAVA_LIBRARY_PATH=${HADOOP_CORE_HOME}/lib/native/${JAVA_PLATFORM}
+ fi
+ fi
+fi
+
+# cygwin path translation
+if $cygwin; then
+ JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
+fi
+
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_CORE_HOME"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
+HADOOP_OPTS="$HADOOP_OPTS
-Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
+if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
+ HADOOP_OPTS="$HADOOP_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
+fi
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.policy.file=$HADOOP_POLICYFILE"
+
+# put hdfs in classpath if present
+if [ "$HADOOP_HDFS_HOME" = "" ]; then
+ if [ -d "${HADOOP_HOME}/hdfs" ]; then
+ HADOOP_HDFS_HOME=$HADOOP_HOME/hdfs
+ echo Found HDFS installed at $HADOOP_HDFS_HOME
+ fi
+fi
+
+if [ -d "${HADOOP_HDFS_HOME}" ]; then
+ for f in $HADOOP_HDFS_HOME/hadoop-*-hdfs.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+ done
+
+ # add libs to CLASSPATH
+ for f in $HADOOP_HDFS_HOME/lib/*.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+ done
+
+ if [ -d "$HADOOP_HDFS_HOME/build/classes" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/classes
+ fi
+fi
+
+# set mapred home if mapred is present
+if [ "$HADOOP_MAPRED_HOME" = "" ]; then
+ if [ -d "${HADOOP_HOME}/mapred" ]; then
+ HADOOP_MAPRED_HOME=$HADOOP_HOME/mapred
+ echo Found MAPRED installed at $HADOOP_MAPRED_HOME
+ fi
+fi
+
+# TODO:remove this when dir structure is changed
+export HADOOP_HDFS_HOME=$HADOOP_HOME
+export HADOOP_MAPRED_HOME=$HADOOP_HOME
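Once the trailing TODO exports are removed (they currently force both homes
back to HADOOP_HOME), separate HDFS and Map-Reduce installs could be selected
through the environment; a hypothetical override:

    HADOOP_HDFS_HOME=/opt/hadoop-hdfs \
    HADOOP_MAPRED_HOME=/opt/hadoop-mapred \
    bin/hadoop fsck /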
Modified: hadoop/core/trunk/bin/hadoop-daemon.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/hadoop-daemon.sh?rev=742937&r1=742936&r2=742937&view=diff
==============================================================================
--- hadoop/core/trunk/bin/hadoop-daemon.sh (original)
+++ hadoop/core/trunk/bin/hadoop-daemon.sh Tue Feb 10 11:12:50 2009
@@ -28,7 +28,7 @@
# HADOOP_NICENESS The scheduling priority for daemons. Defaults to 0.
##
-usage="Usage: hadoop-daemon.sh [--config <conf-dir>] [--hosts hostlistfile]
(start|stop) <hadoop-command> <args...>"
+usage="Usage: hadoop-daemon.sh [--config <conf-dir>] [--hosts hostlistfile]
<hadoop-script> (start|stop) <hadoop-command> <args...>"
# if no args specified, show usage
if [ $# -le 1 ]; then
@@ -42,6 +42,8 @@
. "$bin"/hadoop-config.sh
# get arguments
+hadoopScript=$1
+shift
startStop=$1
shift
command=$1
@@ -114,7 +116,7 @@
hadoop_rotate_log $log
echo starting $command, logging to $log
cd "$HADOOP_HOME"
-    nohup nice -n $HADOOP_NICENESS "$HADOOP_HOME"/bin/hadoop --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
+    nohup nice -n $HADOOP_NICENESS $hadoopScript --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
echo $! > $pid
sleep 1; head "$log"
;;
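Per the updated usage string, callers now pass the entry-point script ahead of
the start/stop verb, for example (conf dir hypothetical):

    bin/hadoop-daemon.sh --config /etc/hadoop/conf bin/hdfs start namenode
    bin/hadoop-daemon.sh --config /etc/hadoop/conf bin/mapred stop tasktracker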
Added: hadoop/core/trunk/bin/hdfs
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/hdfs?rev=742937&view=auto
==============================================================================
--- hadoop/core/trunk/bin/hdfs (added)
+++ hadoop/core/trunk/bin/hdfs Tue Feb 10 11:12:50 2009
@@ -0,0 +1,99 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hdfs-config.sh
+
+function print_usage(){
+ echo "Usage: hdfs [--config confdir] COMMAND"
+ echo " where COMMAND is one of:"
+ echo " namenode -format format the DFS filesystem"
+ echo " secondarynamenode run the DFS secondary namenode"
+ echo " namenode run the DFS namenode"
+ echo " datanode run a DFS datanode"
+ echo " dfsadmin run a DFS admin client"
+ echo " fsck run a DFS filesystem checking utility"
+ echo " balancer run a cluster balancing utility"
+ echo ""
+ echo "Most commands print help when invoked w/o parameters."
+}
+
+if [ $# = 0 ]; then
+ print_usage
+ exit
+fi
+
+COMMAND=$1
+shift
+
+if [ "$COMMAND" = "namenode" ] ; then
+ CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_NAMENODE_OPTS"
+elif [ "$COMMAND" = "secondarynamenode" ] ; then
+ CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_SECONDARYNAMENODE_OPTS"
+elif [ "$COMMAND" = "datanode" ] ; then
+ CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_DATANODE_OPTS"
+elif [ "$COMMAND" = "dfs" ] ; then
+ CLASS=org.apache.hadoop.fs.FsShell
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "dfsadmin" ] ; then
+ CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "fsck" ] ; then
+ CLASS=org.apache.hadoop.hdfs.tools.DFSck
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "balancer" ] ; then
+ CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
+else
+ echo $COMMAND - invalid command
+ print_usage
+ exit
+fi
+
+# for developers, add hdfs classes to CLASSPATH
+if [ -d "$HADOOP_HDFS_HOME/build/classes" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/classes
+fi
+if [ -d "$HADOOP_HDFS_HOME/build/webapps" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build
+fi
+if [ -d "$HADOOP_HDFS_HOME/build/test/classes" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/test/classes
+fi
+if [ -d "$HADOOP_HDFS_HOME/build/tools" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/tools
+fi
+
+# for releases, add core hdfs jar & webapps to CLASSPATH
+if [ -d "$HADOOP_HDFS_HOME/webapps" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME
+fi
+for f in $HADOOP_HDFS_HOME/hadoop-*-hdfs.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+done
+
+# add libs to CLASSPATH
+for f in $HADOOP_HDFS_HOME/lib/*.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+done
+
+exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
Propchange: hadoop/core/trunk/bin/hdfs
------------------------------------------------------------------------------
svn:executable = *
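Run with no arguments, the new script prints the usage block above; a typical
first session against a fresh install might be (illustrative only):

    bin/hdfs namenode -format
    bin/hdfs dfsadmin -report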
Added: hadoop/core/trunk/bin/hdfs-config.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/hdfs-config.sh?rev=742937&view=auto
==============================================================================
--- hadoop/core/trunk/bin/hdfs-config.sh (added)
+++ hadoop/core/trunk/bin/hdfs-config.sh Tue Feb 10 11:12:50 2009
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# included in all the hdfs scripts with source command
+# should not be executed directly
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+#TODO: change the env variable when directory structure is changed
+export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-$bin/..}"
+#export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-$bin/../../core}"
+
+if [ -d "${HADOOP_CORE_HOME}" ]; then
+ . "$HADOOP_CORE_HOME"/bin/hadoop-config.sh
+else
+ echo "Hadoop core not found."
+ exit
+fi
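The guard above gives every entry point one shared setup path; the load order,
sketched:

    bin/hdfs (or bin/mapred)
      -> bin/hdfs-config.sh / bin/mapred-config.sh   # resolve HADOOP_CORE_HOME
         -> bin/hadoop-config.sh                     # JAVA, CLASSPATH, HADOOP_OPTS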
Added: hadoop/core/trunk/bin/mapred
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/mapred?rev=742937&view=auto
==============================================================================
--- hadoop/core/trunk/bin/mapred (added)
+++ hadoop/core/trunk/bin/mapred Tue Feb 10 11:12:50 2009
@@ -0,0 +1,96 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. $bin/mapred-config.sh
+
+function print_usage(){
+ echo "Usage: mapred [--config confdir] COMMAND"
+ echo " where COMMAND is one of:"
+ echo " mradmin run a Map-Reduce admin client"
+ echo " jobtracker run the MapReduce job Tracker node"
+ echo " tasktracker run a MapReduce task Tracker node"
+ echo " pipes run a Pipes job"
+ echo " job manipulate MapReduce jobs"
+ echo " queue get information regarding JobQueues"
+ echo ""
+ echo "Most commands print help when invoked w/o parameters."
+}
+
+if [ $# = 0 ]; then
+ print_usage
+ exit
+fi
+
+COMMAND=$1
+shift
+
+if [ "$COMMAND" = "mradmin" ] ; then
+ CLASS=org.apache.hadoop.mapred.tools.MRAdmin
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "jobtracker" ] ; then
+ CLASS=org.apache.hadoop.mapred.JobTracker
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOBTRACKER_OPTS"
+elif [ "$COMMAND" = "tasktracker" ] ; then
+ CLASS=org.apache.hadoop.mapred.TaskTracker
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_TASKTRACKER_OPTS"
+elif [ "$COMMAND" = "job" ] ; then
+ CLASS=org.apache.hadoop.mapred.JobClient
+elif [ "$COMMAND" = "queue" ] ; then
+ CLASS=org.apache.hadoop.mapred.JobQueueClient
+elif [ "$COMMAND" = "pipes" ] ; then
+ CLASS=org.apache.hadoop.mapred.pipes.Submitter
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+elif [ "$COMMAND" = "sampler" ] ; then
+ CLASS=org.apache.hadoop.mapred.lib.InputSampler
+ HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+else
+ echo $COMMAND - invalid command
+ print_usage
+ exit
+fi
+
+# for developers, add mapred classes to CLASSPATH
+if [ -d "$HADOOP_MAPRED_HOME/build/classes" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/classes
+fi
+if [ -d "$HADOOP_MAPRED_HOME/build/webapps" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build
+fi
+if [ -d "$HADOOP_MAPRED_HOME/build/test/classes" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/test/classes
+fi
+if [ -d "$HADOOP_MAPRED_HOME/build/tools" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/tools
+fi
+
+# for releases, add core mapred jar & webapps to CLASSPATH
+if [ -d "$HADOOP_MAPRED_HOME/webapps" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME
+fi
+for f in $HADOOP_MAPRED_HOME/hadoop-*-mapred.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+done
+
+# add libs to CLASSPATH
+for f in $HADOOP_MAPRED_HOME/lib/*.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+done
+
+exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
Propchange: hadoop/core/trunk/bin/mapred
------------------------------------------------------------------------------
svn:executable = *
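Like bin/hdfs, the mapred script rejects unknown commands instead of falling
through to an arbitrary class name; an abridged smoke test:

    $ bin/mapred queue -list
    $ bin/mapred foo
    foo - invalid command
    Usage: mapred [--config confdir] COMMAND
    ...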
Added: hadoop/core/trunk/bin/mapred-config.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/mapred-config.sh?rev=742937&view=auto
==============================================================================
--- hadoop/core/trunk/bin/mapred-config.sh (added)
+++ hadoop/core/trunk/bin/mapred-config.sh Tue Feb 10 11:12:50 2009
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# included in all the mapred scripts with source command
+# should not be executed directly
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+#TODO: change the env variable when directory structure is changed
+export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-$bin/..}"
+#export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-$bin/../../core}"
+
+if [ -d "${HADOOP_CORE_HOME}" ]; then
+ . "$HADOOP_CORE_HOME"/bin/hadoop-config.sh
+else
+ echo "Hadoop core not found."
+ exit
+fi
Modified: hadoop/core/trunk/bin/start-all.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/start-all.sh?rev=742937&r1=742936&r2=742937&view=diff
==============================================================================
--- hadoop/core/trunk/bin/start-all.sh (original)
+++ hadoop/core/trunk/bin/start-all.sh Tue Feb 10 11:12:50 2009
@@ -18,13 +18,19 @@
# Start all hadoop daemons. Run this on master node.
+echo "This script is Deprecated. Instead use start-dfs.sh and start-mapred.sh"
+
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
. "$bin"/hadoop-config.sh
-# start dfs daemons
-"$bin"/start-dfs.sh --config $HADOOP_CONF_DIR
+# start hdfs daemons if hdfs is present
+if [ -f "${HADOOP_HDFS_HOME}"/bin/start-dfs.sh ]; then
+ "${HADOOP_HDFS_HOME}"/bin/start-dfs.sh --config $HADOOP_CONF_DIR
+fi
-# start mapred daemons
-"$bin"/start-mapred.sh --config $HADOOP_CONF_DIR
+# start mapred daemons if mapred is present
+if [ -f "${HADOOP_MAPRED_HOME}"/bin/start-mapred.sh ]; then
+ "${HADOOP_MAPRED_HOME}"/bin/start-mapred.sh --config $HADOOP_CONF_DIR
+fi
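The deprecation notice points at the per-project starters; the explicit
equivalent of the old behaviour is simply:

    bin/start-dfs.sh --config $HADOOP_CONF_DIR
    bin/start-mapred.sh --config $HADOOP_CONF_DIR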
Modified: hadoop/core/trunk/bin/start-balancer.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/start-balancer.sh?rev=742937&r1=742936&r2=742937&view=diff
==============================================================================
--- hadoop/core/trunk/bin/start-balancer.sh (original)
+++ hadoop/core/trunk/bin/start-balancer.sh Tue Feb 10 11:12:50 2009
@@ -18,8 +18,8 @@
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
-. "$bin"/hadoop-config.sh
+. "$bin"/hdfs-config.sh
# Start balancer daemon.
-"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR start balancer $@
+"$HADOOP_CORE_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR "$bin"/hdfs
start balancer $@
Modified: hadoop/core/trunk/bin/start-dfs.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/start-dfs.sh?rev=742937&r1=742936&r2=742937&view=diff
==============================================================================
--- hadoop/core/trunk/bin/start-dfs.sh (original)
+++ hadoop/core/trunk/bin/start-dfs.sh Tue Feb 10 11:12:50 2009
@@ -25,7 +25,7 @@
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
-. "$bin"/hadoop-config.sh
+. "$bin"/hdfs-config.sh
# get arguments
if [ $# -ge 1 ]; then
@@ -47,6 +47,6 @@
# start dfs daemons
# start namenode after datanodes, to minimize time namenode is up w/o data
# note: datanodes will log connection errors until namenode starts
-"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR start namenode $nameStartOpt
-"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR start datanode $dataStartOpt
-"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR --hosts masters start
secondarynamenode
+"$HADOOP_CORE_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR "$bin"/hdfs
start namenode $nameStartOpt
+"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR
"$bin"/hdfs start datanode $dataStartOpt
+"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --hosts
masters "$bin"/hdfs start secondarynamenode
Modified: hadoop/core/trunk/bin/start-mapred.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/start-mapred.sh?rev=742937&r1=742936&r2=742937&view=diff
==============================================================================
--- hadoop/core/trunk/bin/start-mapred.sh (original)
+++ hadoop/core/trunk/bin/start-mapred.sh Tue Feb 10 11:12:50 2009
@@ -21,9 +21,9 @@
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
-. "$bin"/hadoop-config.sh
+. $bin/mapred-config.sh
# start mapred daemons
# start jobtracker first to minimize connection errors at startup
-"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR start jobtracker
-"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR start tasktracker
+"$HADOOP_CORE_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR
"$bin"/mapred start jobtracker
+"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR
"$bin"/mapred start tasktracker
\ No newline at end of file
Modified: hadoop/core/trunk/bin/stop-all.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/stop-all.sh?rev=742937&r1=742936&r2=742937&view=diff
==============================================================================
--- hadoop/core/trunk/bin/stop-all.sh (original)
+++ hadoop/core/trunk/bin/stop-all.sh Tue Feb 10 11:12:50 2009
@@ -18,10 +18,20 @@
# Stop all hadoop daemons. Run this on master node.
+echo "This script is Deprecated. Instead use stop-dfs.sh and stop-mapred.sh"
+
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
. "$bin"/hadoop-config.sh
-"$bin"/stop-mapred.sh --config $HADOOP_CONF_DIR
-"$bin"/stop-dfs.sh --config $HADOOP_CONF_DIR
+# stop hdfs daemons if hdfs is present
+if [ -f "${HADOOP_HDFS_HOME}"/bin/stop-dfs.sh ]; then
+ "${HADOOP_HDFS_HOME}"/bin/stop-dfs.sh --config $HADOOP_CONF_DIR
+fi
+
+# stop mapred daemons if mapred is present
+if [ -f "${HADOOP_MAPRED_HOME}"/bin/stop-mapred.sh ]; then
+ "${HADOOP_MAPRED_HOME}"/bin/stop-mapred.sh --config $HADOOP_CONF_DIR
+fi
+
Modified: hadoop/core/trunk/bin/stop-balancer.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/stop-balancer.sh?rev=742937&r1=742936&r2=742937&view=diff
==============================================================================
--- hadoop/core/trunk/bin/stop-balancer.sh (original)
+++ hadoop/core/trunk/bin/stop-balancer.sh Tue Feb 10 11:12:50 2009
@@ -18,9 +18,9 @@
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
-. "$bin"/hadoop-config.sh
+. "$bin"/hdfs-config.sh
# Stop balancer daemon.
# Run this on the machine where the balancer is running
-"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop balancer
+"$HADOOP_CORE_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR "$bin"/hdfs
stop balancer
Modified: hadoop/core/trunk/bin/stop-dfs.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/stop-dfs.sh?rev=742937&r1=742936&r2=742937&view=diff
==============================================================================
--- hadoop/core/trunk/bin/stop-dfs.sh (original)
+++ hadoop/core/trunk/bin/stop-dfs.sh Tue Feb 10 11:12:50 2009
@@ -21,9 +21,8 @@
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
-. "$bin"/hadoop-config.sh
-
-"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop namenode
-"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR stop datanode
-"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR --hosts masters stop
secondarynamenode
+. "$bin"/hdfs-config.sh
+"$HADOOP_CORE_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR "$bin"/hdfs
stop namenode
+"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR
"$bin"/hdfs stop datanode
+"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --hosts
masters "$bin"/hdfs stop secondarynamenode
\ No newline at end of file
Modified: hadoop/core/trunk/bin/stop-mapred.sh
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/stop-mapred.sh?rev=742937&r1=742936&r2=742937&view=diff
==============================================================================
--- hadoop/core/trunk/bin/stop-mapred.sh (original)
+++ hadoop/core/trunk/bin/stop-mapred.sh Tue Feb 10 11:12:50 2009
@@ -21,8 +21,7 @@
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
-. "$bin"/hadoop-config.sh
-
-"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop jobtracker
-"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR stop tasktracker
+. $bin/mapred-config.sh
+"$HADOOP_CORE_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR
"$bin"/mapred stop jobtracker
+"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR
"$bin"/mapred stop tasktracker
\ No newline at end of file