Author: rvs
Date: Thu Feb 23 01:09:01 2012
New Revision: 1292607
URL: http://svn.apache.org/viewvc?rev=1292607&view=rev
Log:
BIGTOP-401. unable to run hadoop-mapreduce-historyserver
Added:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-datanode.svc
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-namenode.svc
- copied, changed from r1245782,
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-secondarynamenode.svc
- copied, changed from r1245782,
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.default
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.svc
- copied, changed from r1245782,
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-yarn-nodemanager.svc
- copied, changed from r1245782,
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-yarn-resourcemanager.svc
- copied, changed from r1245782,
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/init.d.tmpl
(with props)
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SOURCES/.gitignore
Removed:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/hadoop/service-init.d-yarn.tpl
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/hadoop/service-init.d.tpl
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SOURCES/hadoop-init.tmpl
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SOURCES/hadoop-init.tmpl.suse
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SOURCES/yarn-init.tmpl
Modified:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.install
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/hadoop/rules
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
Added:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-datanode.svc
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-datanode.svc?rev=1292607&view=auto
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-datanode.svc
(added)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-datanode.svc
Thu Feb 23 01:09:01 2012
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+HADOOP_NAME="datanode"
+DAEMON="hadoop-hdfs-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop/sbin/hadoop-daemon.sh"
+SVC_USER="hdfs"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/hadoop/conf"
+PIDFILE="/var/run/hadoop/hadoop-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/hadoop-$HADOOP_NAME"
+
+CHKCONFIG="2345 85 15"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"
+
+generate_start() {
+
+cat <<'__EOT__'
+start() {
+ [ -x $EXEC_PATH ] || exit $ERROR_PROGRAM_NOT_INSTALLED
+ [ -d $CONF_DIR ] || exit $ERROR_PROGRAM_NOT_CONFIGURED
+ log_success_msg "Starting ${DESC}: "
+
+ TARGET_USER_NAME="HADOOP_`echo datanode | tr a-z A-Z`_USER"
+ TARGET_USER=$(eval "echo \$$TARGET_USER_NAME")
+
+ # The following needs to be removed once HDFS-1943 gets finally put to rest.
+ # The logic of this ugly hack is this: IFF we do NOT have jsvc installed it
is
+ # guaranteed that we can NOT be running in a secure mode and thus we need to
+ # workaround HDFS-1943 (start as non-root). As soon as jsvc gets installed
+ # we are assuming a secure installation and starting a data node as root.
+ # This leaves 2 corner cases:
+ # 1. HADOOP_DATANODE_USER being set to root
+ # 2. jsvc is installed but Hadoop is configured to run in an insecure mode
+ # Both will currently fail
+ if [ -f $HADOOP_PREFIX/libexec/jsvc ] && [ -n "$HADOOP_SECURE_DN_USER" ];
then
+ TARGET_USER=root
+ fi
+
+ export HADOOP_IDENT_STRING=$TARGET_USER
+ su -s /bin/bash $TARGET_USER -c "$EXEC_PATH --config '$CONF_DIR' start
$DAEMON_FLAGS"
+
+ # Some processes are slow to start
+ sleep $SLEEP_TIME
+ checkstatusofproc
+ RETVAL=$?
+
+ [ $RETVAL -eq $RETVAL_SUCCESS ] && touch $LOCKFILE
+ return $RETVAL
+}
+__EOT__
+
+}
Copied:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-namenode.svc
(from r1245782,
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default)
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-namenode.svc?p2=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-namenode.svc&p1=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default&r1=1245782&r2=1292607&rev=1292607&view=diff
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
(original)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-namenode.svc
Thu Feb 23 01:09:01 2012
@@ -12,21 +12,30 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-export HADOOP_HOME_WARN_SUPPRESS=true
-export HADOOP_HOME=/usr/lib/hadoop
-export HADOOP_PREFIX=/usr/lib/hadoop
-export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
-export HADOOP_NAMENODE_USER=hdfs
-export HADOOP_SECONDARYNAMENODE_USER=hdfs
-export HADOOP_DATANODE_USER=hdfs
-export HADOOP_JOBTRACKER_USER=mapred
-export HADOOP_TASKTRACKER_USER=mapred
-export HADOOP_IDENT_STRING=hdfs
-export HADOOP_PID_DIR=/var/run/hadoop
-export HADOOP_LOG_DIR=/var/log/hadoop
-export HADOOP_CONF_DIR=/etc/hadoop/conf
-export
HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${HADOOP_PREFIX}/*:${HADOOP_PREFIX}/lib/*:${HADOOP_PREFIX}/"
+HADOOP_NAME="namenode"
+DAEMON="hadoop-hdfs-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop/sbin/hadoop-daemon.sh"
+SVC_USER="hdfs"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/hadoop/conf"
+PIDFILE="/var/run/hadoop/hadoop-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/hadoop-$HADOOP_NAME"
-# Pointless definitions making yarn happy
-export HADOOP_COMMON_HOME=${HADOOP_PREFIX}
-export HADOOP_HDFS_HOME=${HADOOP_PREFIX}
+CHKCONFIG="2345 85 15"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"
+
+generate_extra_commands() {
+
+cat <<'__EOT__'
+ upgrade|rollback)
+ DAEMON_FLAGS="$DAEMON_FLAGS -${1}"
+ start
+ ;;
+ *)
+ echo $"Usage: $0
{start|stop|status|restart|try-restart|condrestart|upgrade|rollback}"
+ exit 1
+__EOT__
+
+}
Copied:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-secondarynamenode.svc
(from r1245782,
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default)
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-secondarynamenode.svc?p2=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-secondarynamenode.svc&p1=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default&r1=1245782&r2=1292607&rev=1292607&view=diff
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
(original)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-hdfs-secondarynamenode.svc
Thu Feb 23 01:09:01 2012
@@ -12,21 +12,16 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-export HADOOP_HOME_WARN_SUPPRESS=true
-export HADOOP_HOME=/usr/lib/hadoop
-export HADOOP_PREFIX=/usr/lib/hadoop
-export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
-export HADOOP_NAMENODE_USER=hdfs
-export HADOOP_SECONDARYNAMENODE_USER=hdfs
-export HADOOP_DATANODE_USER=hdfs
-export HADOOP_JOBTRACKER_USER=mapred
-export HADOOP_TASKTRACKER_USER=mapred
-export HADOOP_IDENT_STRING=hdfs
-export HADOOP_PID_DIR=/var/run/hadoop
-export HADOOP_LOG_DIR=/var/log/hadoop
-export HADOOP_CONF_DIR=/etc/hadoop/conf
-export
HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${HADOOP_PREFIX}/*:${HADOOP_PREFIX}/lib/*:${HADOOP_PREFIX}/"
+HADOOP_NAME="secondarynamenode"
+DAEMON="hadoop-hdfs-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop/sbin/hadoop-daemon.sh"
+SVC_USER="hdfs"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/hadoop/conf"
+PIDFILE="/var/run/hadoop/hadoop-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/hadoop-$HADOOP_NAME"
-# Pointless definitions making yarn happy
-export HADOOP_COMMON_HOME=${HADOOP_PREFIX}
-export HADOOP_HDFS_HOME=${HADOOP_PREFIX}
+CHKCONFIG="2345 85 15"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"
Added:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc?rev=1292607&view=auto
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc
(added)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc
Thu Feb 23 01:09:01 2012
@@ -0,0 +1,70 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+HADOOP_NAME="httpfs"
+DAEMON="hadoop-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop-httpfs/sbin/httpfs.sh"
+SVC_USER="$HADOOP_NAME"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/$DAEMON/conf"
+PIDFILE="/var/run/$DAEMON/hadoop-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/$DAEMON"
+
+CHKCONFIG="345 90 10"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"
+
+generate_start() {
+
+cat <<'__EOT__'
+start() {
+ [ -x $EXEC_PATH ] || exit $ERROR_PROGRAM_NOT_INSTALLED
+ [ -d $CONF_DIR ] || exit $ERROR_PROGRAM_NOT_CONFIGURED
+ log_success_msg "Starting ${DESC}: "
+
+ export HTTPFS_USER="$SVC_USER"
+ export HTTPFS_CONFIG="$CONF_DIR"
+ export HTTPFS_LOG=${HTTPFS_LOG:-"/var/log/hadoop-httpfs/"}
+ export HTTPFS_TEMP="$(dirname $PIDFILE)"
+ export HTTPFS_SLEEP_TIME="$SLEEP_TIME"
+ export CATALINA_BASE=${CATALINA_BASE:-"/usr/lib/hadoop-httpfs"}
+ export CATALINA_PID="$PIDFILE"
+ export CATALINA_TMPDIR="$HTTPFS_TEMP"
+
+ su -s /bin/bash -c "${EXEC_PATH} start $DAEMON_FLAGS" $HTTPFS_USER
+
+ checkstatusofproc
+ RETVAL=$?
+
+ [ $RETVAL -eq $RETVAL_SUCCESS ] && touch $LOCKFILE
+ return $RETVAL
+}
+__EOT__
+
+}
+
+generate_stop() {
+
+cat <<'__EOT__'
+stop() {
+ log_success_msg "Stopping ${DESC}: "
+ su -s /bin/bash $SVC_USER -c "${EXEC_PATH} stop $SLEEP_TIME -force"
+ RETVAL=$?
+
+ [ $RETVAL -eq $RETVAL_SUCCESS ] && rm -f $LOCKFILE $PIDFILE
+}
+__EOT__
+
+}
Added:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.default
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.default?rev=1292607&view=auto
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.default
(added)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.default
Thu Feb 23 01:09:01 2012
@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# FIXME: MAPREDUCE-3900
+export YARN_HOME=/usr/lib/hadoop
+export YARN_IDENT_STRING=mapreduce
+export YARN_PID_DIR=/var/run/mapreduce
+export YARN_LOG_DIR=/var/log/mapreduce
+export YARN_CONF_DIR=/etc/hadoop/conf
Copied:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.svc
(from r1245782,
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default)
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.svc?p2=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.svc&p1=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default&r1=1245782&r2=1292607&rev=1292607&view=diff
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
(original)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.svc
Thu Feb 23 01:09:01 2012
@@ -12,21 +12,16 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-export HADOOP_HOME_WARN_SUPPRESS=true
-export HADOOP_HOME=/usr/lib/hadoop
-export HADOOP_PREFIX=/usr/lib/hadoop
-export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
-export HADOOP_NAMENODE_USER=hdfs
-export HADOOP_SECONDARYNAMENODE_USER=hdfs
-export HADOOP_DATANODE_USER=hdfs
-export HADOOP_JOBTRACKER_USER=mapred
-export HADOOP_TASKTRACKER_USER=mapred
-export HADOOP_IDENT_STRING=hdfs
-export HADOOP_PID_DIR=/var/run/hadoop
-export HADOOP_LOG_DIR=/var/log/hadoop
-export HADOOP_CONF_DIR=/etc/hadoop/conf
-export
HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${HADOOP_PREFIX}/*:${HADOOP_PREFIX}/lib/*:${HADOOP_PREFIX}/"
+HADOOP_NAME="historyserver"
+DAEMON="hadoop-mapreduce-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop/sbin/mr-jobhistory-daemon.sh"
+SVC_USER="mapreduce"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/hadoop/conf"
+PIDFILE="/var/run/mapreduce/yarn-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/hadoop-$HADOOP_NAME"
-# Pointless definitions making yarn happy
-export HADOOP_COMMON_HOME=${HADOOP_PREFIX}
-export HADOOP_HDFS_HOME=${HADOOP_PREFIX}
+CHKCONFIG="345 85 15"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"
Copied:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-yarn-nodemanager.svc
(from r1245782,
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default)
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-yarn-nodemanager.svc?p2=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-yarn-nodemanager.svc&p1=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default&r1=1245782&r2=1292607&rev=1292607&view=diff
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
(original)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-yarn-nodemanager.svc
Thu Feb 23 01:09:01 2012
@@ -12,21 +12,16 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-export HADOOP_HOME_WARN_SUPPRESS=true
-export HADOOP_HOME=/usr/lib/hadoop
-export HADOOP_PREFIX=/usr/lib/hadoop
-export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
-export HADOOP_NAMENODE_USER=hdfs
-export HADOOP_SECONDARYNAMENODE_USER=hdfs
-export HADOOP_DATANODE_USER=hdfs
-export HADOOP_JOBTRACKER_USER=mapred
-export HADOOP_TASKTRACKER_USER=mapred
-export HADOOP_IDENT_STRING=hdfs
-export HADOOP_PID_DIR=/var/run/hadoop
-export HADOOP_LOG_DIR=/var/log/hadoop
-export HADOOP_CONF_DIR=/etc/hadoop/conf
-export
HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${HADOOP_PREFIX}/*:${HADOOP_PREFIX}/lib/*:${HADOOP_PREFIX}/"
+HADOOP_NAME="nodemanager"
+DAEMON="hadoop-yarn-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop/sbin/yarn-daemon.sh"
+SVC_USER="yarn"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/hadoop/conf"
+PIDFILE="/var/run/yarn/yarn-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/hadoop-$HADOOP_NAME"
-# Pointless definitions making yarn happy
-export HADOOP_COMMON_HOME=${HADOOP_PREFIX}
-export HADOOP_HDFS_HOME=${HADOOP_PREFIX}
+CHKCONFIG="345 85 15"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"
Copied:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-yarn-resourcemanager.svc
(from r1245782,
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default)
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-yarn-resourcemanager.svc?p2=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-yarn-resourcemanager.svc&p1=incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default&r1=1245782&r2=1292607&rev=1292607&view=diff
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
(original)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop-yarn-resourcemanager.svc
Thu Feb 23 01:09:01 2012
@@ -12,21 +12,16 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-export HADOOP_HOME_WARN_SUPPRESS=true
-export HADOOP_HOME=/usr/lib/hadoop
-export HADOOP_PREFIX=/usr/lib/hadoop
-export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
-export HADOOP_NAMENODE_USER=hdfs
-export HADOOP_SECONDARYNAMENODE_USER=hdfs
-export HADOOP_DATANODE_USER=hdfs
-export HADOOP_JOBTRACKER_USER=mapred
-export HADOOP_TASKTRACKER_USER=mapred
-export HADOOP_IDENT_STRING=hdfs
-export HADOOP_PID_DIR=/var/run/hadoop
-export HADOOP_LOG_DIR=/var/log/hadoop
-export HADOOP_CONF_DIR=/etc/hadoop/conf
-export
HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${HADOOP_PREFIX}/*:${HADOOP_PREFIX}/lib/*:${HADOOP_PREFIX}/"
+HADOOP_NAME="resourcemanager"
+DAEMON="hadoop-yarn-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop/sbin/yarn-daemon.sh"
+SVC_USER="yarn"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/hadoop/conf"
+PIDFILE="/var/run/yarn/yarn-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/hadoop-$HADOOP_NAME"
-# Pointless definitions making yarn happy
-export HADOOP_COMMON_HOME=${HADOOP_PREFIX}
-export HADOOP_HDFS_HOME=${HADOOP_PREFIX}
+CHKCONFIG="345 85 15"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"
Modified:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default?rev=1292607&r1=1292606&r2=1292607&view=diff
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
(original)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/hadoop.default
Thu Feb 23 01:09:01 2012
@@ -30,3 +30,9 @@ export HADOOP_CLASSPATH="${HADOOP_CLASSP
# Pointless definitions making yarn happy
export HADOOP_COMMON_HOME=${HADOOP_PREFIX}
export HADOOP_HDFS_HOME=${HADOOP_PREFIX}
+export YARN_HOME=/usr/lib/hadoop
+export YARN_IDENT_STRING=yarn
+export YARN_PID_DIR=/var/run/yarn
+export YARN_LOG_DIR=/var/log/yarn
+export YARN_CONF_DIR=/etc/hadoop/conf
+export YARN_CLASSPATH="${YARN_CLASSPATH}:${YARN_HOME}/*:${YARN_HOME}/lib/*"
Added:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/init.d.tmpl
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/init.d.tmpl?rev=1292607&view=auto
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/init.d.tmpl
(added)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/init.d.tmpl
Thu Feb 23 01:09:01 2012
@@ -0,0 +1,311 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This is a poor man's templating engine for generating init.d scripts to
+# support all the Apache services that Bigtop distro has. An actual init.d
+# script gets generated by running this script under bash and giving it
+# a mandatory argument of a file containing the configuration for the service.
+# The argument file should be a valid piece of bash code since it gets directly
+# sourced into this template. E.g.
+# $ bash ./init.d.tmpl hadoop-hdfs-namenode.svc > hadoop-hdfs-namenode
+#
+# You must declare the following in your .svc configuration file:
+# DAEMON="name of the resulting init.d script"
+# DESC="Free form human readable description of the service"
+# EXEC_PATH="path to the upstream daemon management script"
+# SVC_USER="user to run this service as"
+# DAEMON_FLAGS="flags to be passed to the $EXEC_PATH"
+# CONF_DIR="path to the configuration directory"
+# PIDFILE="file holding a PID of the running daemon"
+# LOCKFILE="file signifying the service lock"
+#
+# CHKCONFIG="chkconfig(8) registration signature"
+# INIT_DEFAULT_START="run levels to use"
+# INIT_DEFAULT_STOP="run levels not to use ;-)"
+#
+# You can, also, override parts of the generated init.d script by providing
+# function definitions for: generate_start, generate_stop and
generate_extra_commands.
+# See the default implementations below and feel free to customize. Also look
+# for existing .svc files in common to see how different services are tweaking
+# the defaults.
+#
+# Of course, if this whole templating thing grows too big we might need to
+# consider a real templating engine (I have played with m4, but it seems
+# quite brittle when used for manipulating pieces of the shell code -- think
+# $0 and the like).
+
+if [ $# -lt 1 ] ; then
+ echo "Usage: ${BASH_SOURCE-0} service.definition.svc"
+ exit 1
+fi
+
+generate_start() {
+
+cat <<'__EOT__'
+start() {
+ [ -x $EXEC_PATH ] || exit $ERROR_PROGRAM_NOT_INSTALLED
+ [ -d $CONF_DIR ] || exit $ERROR_PROGRAM_NOT_CONFIGURED
+ log_success_msg "Starting ${DESC}: "
+
+ TARGET_USER_NAME="HADOOP_`echo datanode | tr a-z A-Z`_USER"
+ TARGET_USER=$(eval "echo \$$TARGET_USER_NAME")
+
+ # The following needs to be removed once HDFS-1943 gets finally put to rest.
+ # The logic of this ugly hack is this: IFF we do NOT have jsvc installed it
is
+ # guaranteed that we can NOT be running in a secure mode and thus we need to
+ # workaround HDFS-1943 (start as non-root). As soon as jsvc gets installed
+ # we are assuming a secure installation and starting a data node as root.
+ # This leaves 2 corner cases:
+ # 1. HADOOP_DATANODE_USER being set to root
+ # 2. jsvc is installed but Hadoop is configured to run in an insecure mode
+ # Both will currently fail
+ if [ -f $HADOOP_PREFIX/libexec/jsvc ] && [ -n "$HADOOP_SECURE_DN_USER" ];
then
+ TARGET_USER=root
+ fi
+
+ export HADOOP_IDENT_STRING=$TARGET_USER
+ su -s /bin/bash $TARGET_USER -c "$EXEC_PATH --config '$CONF_DIR' start
$DAEMON_FLAGS"
+
+ # Some processes are slow to start
+ sleep $SLEEP_TIME
+ checkstatusofproc
+ RETVAL=$?
+
+ [ $RETVAL -eq $RETVAL_SUCCESS ] && touch $LOCKFILE
+ return $RETVAL
+}
+__EOT__
+
+}
+generate_start() {
+
+cat <<'__EOT__'
+
+start() {
+ [ -x $EXEC_PATH ] || exit $ERROR_PROGRAM_NOT_INSTALLED
+ [ -d $CONF_DIR ] || exit $ERROR_PROGRAM_NOT_CONFIGURED
+ log_success_msg "Starting ${DESC}: "
+
+ su -s /bin/bash $SVC_USER -c "$EXEC_PATH --config '$CONF_DIR' start
$DAEMON_FLAGS"
+
+ # Some processes are slow to start
+ sleep $SLEEP_TIME
+ checkstatusofproc
+ RETVAL=$?
+
+ [ $RETVAL -eq $RETVAL_SUCCESS ] && touch $LOCKFILE
+ return $RETVAL
+}
+
+__EOT__
+
+}
+
+generate_stop() {
+
+cat <<'__EOT__'
+
+stop() {
+ log_success_msg "Stopping ${DESC}: "
+ start_daemon $EXEC_PATH --config "$CONF_DIR" stop $DAEMON_FLAGS
+ RETVAL=$?
+
+ [ $RETVAL -eq $RETVAL_SUCCESS ] && rm -f $LOCKFILE $PIDFILE
+}
+
+__EOT__
+
+}
+
+generate_extra_commands() {
+
+cat <<'__EOT__'
+ *)
+ echo $"Usage: $0 {start|stop|status|restart|try-restart|condrestart}"
+ exit 1
+__EOT__
+
+}
+
+###################################################################
+# NOTE how we are sourcing the argument here so that a user-defined
+# settings have a chance to override the default values for
+# generate_start, generate_stop and generate_extra_commands. If you
+# ever want to make this template even more flexible -- define the
+# default values above this line
+. $1
+
+cat <<__EOT__
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Starts a $DESC
+#
+# chkconfig: $CHKCONFIG
+# description: $DESC
+#
+### BEGIN INIT INFO
+# Provides: $DAEMON
+# Short-Description: $DESC
+# Default-Start: $INIT_DEFAULT_START
+# Default-Stop: $INIT_DEFAULT_STOP
+# Required-Start: \$syslog \$remote_fs
+# Required-Stop: \$syslog \$remote_fs
+# Should-Start:
+# Should-Stop:
+### END INIT INFO
+
+. /lib/lsb/init-functions
+. /etc/default/hadoop
+
+if [ -f /etc/default/$DAEMON ] ; then
+ . /etc/default/$DAEMON
+fi
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+ . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+ . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+
+RETVAL_SUCCESS=0
+
+STATUS_RUNNING=0
+STATUS_DEAD=1
+STATUS_DEAD_AND_LOCK=2
+STATUS_NOT_RUNNING=3
+STATUS_OTHER_ERROR=102
+
+
+ERROR_PROGRAM_NOT_INSTALLED=5
+ERROR_PROGRAM_NOT_CONFIGURED=6
+
+
+RETVAL=0
+SLEEP_TIME=5
+PROC_NAME="su"
+
+DAEMON="$DAEMON"
+DESC="$DESC"
+EXEC_PATH="$EXEC_PATH"
+SVC_USER="$SVC_USER"
+DAEMON_FLAGS="$DAEMON_FLAGS"
+CONF_DIR="$CONF_DIR"
+PIDFILE="$PIDFILE"
+LOCKFILE="$LOCKFILE"
+
+install -d -m 0755 -o $SVC_USER -g $SVC_USER $(dirname $PIDFILE) 1>/dev/null
2>&1 || :
+__EOT__
+
+generate_start
+generate_stop
+
+cat <<'__EOT__'
+restart() {
+ stop
+ start
+}
+
+checkstatusofproc(){
+ pidofproc -p $PIDFILE $PROC_NAME > /dev/null
+}
+
+checkstatus(){
+ checkstatusofproc
+ status=$?
+
+ case "$status" in
+ $STATUS_RUNNING)
+ log_success_msg "${DESC} is running"
+ ;;
+ $STATUS_DEAD)
+ log_failure_msg "${DESC} is dead and pid file exists"
+ ;;
+ $STATUS_DEAD_AND_LOCK)
+ log_failure_msg "${DESC} is dead and lock file exists"
+ ;;
+ $STATUS_NOT_RUNNING)
+ log_failure_msg "${DESC} is not running"
+ ;;
+ *)
+ log_failure_msg "${DESC} status is unknown"
+ ;;
+ esac
+ return $status
+}
+
+condrestart(){
+ [ -e $LOCKFILE ] && restart || :
+}
+
+check_for_root() {
+ if [ $(id -ur) -ne 0 ]; then
+ echo 'Error: root user required'
+ echo
+ exit 1
+ fi
+}
+
+service() {
+ case "$1" in
+ start)
+ check_for_root
+ start
+ ;;
+ stop)
+ check_for_root
+ stop
+ ;;
+ status)
+ checkstatus
+ RETVAL=$?
+ ;;
+ restart)
+ check_for_root
+ restart
+ ;;
+ condrestart|try-restart)
+ check_for_root
+ condrestart
+ ;;
+__EOT__
+
+generate_extra_commands
+
+cat <<'__EOT__'
+ esac
+}
+
+service "$1"
+
+exit $RETVAL
+__EOT__
Propchange:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/init.d.tmpl
------------------------------------------------------------------------------
svn:executable = *
Modified:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.install
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.install?rev=1292607&r1=1292606&r2=1292607&view=diff
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.install
(original)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.install
Thu Feb 23 01:09:01 2012
@@ -1,8 +1,10 @@
/etc/security/limits.d/mapreduce.conf
/usr/lib/hadoop/hadoop-mapreduce*.jar
/usr/lib/hadoop/hadoop-streaming*.jar
+/usr/lib/hadoop/hadoop-extras*.jar
/usr/lib/hadoop/libexec/mapred-config.sh
/usr/lib/hadoop/bin/mapred
+/usr/lib/hadoop/sbin/mr-jobhistory-daemon.sh
/usr/bin/mapred
# FIXME: HADOOP-7939
/var/log/mapreduce
Modified:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/hadoop/rules
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/hadoop/rules?rev=1292607&r1=1292606&r2=1292607&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/hadoop/rules
(original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/deb/hadoop/rules
Thu Feb 23 01:09:01 2012
@@ -73,36 +73,18 @@ install: build
# Run dh_lintian if we've got it - this doesn't exist on debhelper 6 (eg
Ubuntu Hardy)
(dh_lintian) || /bin/true
-
-namenode_user=hdfs
-secondarynamenode_user=hdfs
-datanode_user=hdfs
-resourcemanager_user=yarn
-nodemanager_user=yarn
-
-httpfs: debian/service-hadoop-httpfs
- cp $< debian/[email protected]
-
-hdfs-namenode hdfs-secondarynamenode hdfs-datanode mapreduce-historyserver:
debian/service-init.d.tpl
- sed -e "s|@HADOOP_DAEMON@|$(patsubst mapreduce-%,%,$(patsubst
hdfs-%,%,$@))|" \
- -e "s|@HADOOP_MAJOR_VERSION@|$hadoop_version|" \
- -e "s|@DAEMON_USER@|$($@_user)|" $< > debian/[email protected]
- # FIXME: workaround for BIGTOP-105
- sed -e "s|@HADOOP_DAEMON@|$@|" < debian/hadoop.daemon.postinst.tpl >
debian/[email protected]
-
-yarn-resourcemanager yarn-nodemanager: debian/service-init.d-yarn.tpl
- sed -e "s|@HADOOP_DAEMON@|$(patsubst yarn-%,%,$@)|" \
- -e "s|@HADOOP_MAJOR_VERSION@|$hadoop_version|" \
- -e "s|@DAEMON_USER@|$($@_user)|" $< > debian/[email protected]
+hdfs-namenode hdfs-secondarynamenode hdfs-datanode yarn-resourcemanager yarn-nodemanager mapreduce-historyserver httpfs: debian/init.d.tmpl
+ bash $< debian/[email protected] > debian/[email protected]
# FIXME: workaround for BIGTOP-105
- sed -e "s|@HADOOP_DAEMON@|$@|" < debian/hadoop.daemon.postinst.tpl >
debian/[email protected]
-
+ [ -f debian/[email protected] ] || cp
debian/hadoop.daemon.postinst.tpl debian/[email protected]
+ sed -i -e "s|@HADOOP_DAEMON@|$@|" debian/[email protected]
install-indep: hdfs-namenode hdfs-secondarynamenode hdfs-datanode yarn-resourcemanager yarn-nodemanager mapreduce-historyserver httpfs
mkdir -p debian/tmp/etc/default
cp debian/hadoop.default debian/tmp/etc/default/hadoop
cp debian/yarn.default debian/tmp/etc/default/yarn
cp debian/hadoop-httpfs.default debian/tmp/etc/default/hadoop-httpfs
+	cp debian/hadoop-mapreduce-historyserver.default debian/tmp/etc/default/hadoop-mapreduce-historyserver
dh_install --sourcedir=debian/tmp -i
Added:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SOURCES/.gitignore
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SOURCES/.gitignore?rev=1292607&view=auto
==============================================================================
(empty)
Modified:
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
URL:
http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec?rev=1292607&r1=1292606&r2=1292607&view=diff
==============================================================================
---
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
(original)
+++
incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
Thu Feb 23 01:09:01 2012
@@ -51,10 +51,11 @@
%define bin_hadoop %{_bindir}
%define man_hadoop %{_mandir}
%define doc_hadoop %{_docdir}/%{name}-%{hadoop_version}
+%define httpfs_services httpfs
%define mapreduce_services mapreduce-historyserver
%define hdfs_services hdfs-namenode hdfs-secondarynamenode hdfs-datanode
%define yarn_services yarn-resourcemanager yarn-nodemanager
-%define hadoop_services %{hdfs_services} %{mapreduce_services} %{yarn_services}
+%define hadoop_services %{hdfs_services} %{mapreduce_services} %{yarn_services} %{httpfs_services}
# Hadoop outputs built binaries into %{hadoop_build}
%define hadoop_build_path build
%define static_images_dir src/webapps/static/images
@@ -137,17 +138,22 @@ Source0: %{name}-%{hadoop_base_version}.
Source1: do-component-build
Source2: install_%{name}.sh
Source3: hadoop.default
-Source4: hadoop-init.tmpl
-Source5: hadoop-init.tmpl.suse
+Source4: hadoop-fuse.default
+Source5: hadoop-httpfs.default
Source6: hadoop.1
Source7: hadoop-fuse-dfs.1
-Source8: hadoop-fuse.default
-Source9: hdfs.conf
-Source10: yarn-init.tmpl
-Source11: hadoop-httpfs.default
-Source12: service-hadoop-httpfs
-Source13: yarn.conf
-Source14: mapreduce.conf
+Source8: hdfs.conf
+Source9: yarn.conf
+Source10: mapreduce.conf
+Source11: init.d.tmpl
+Source12: hadoop-hdfs-namenode.svc
+Source13: hadoop-hdfs-datanode.svc
+Source14: hadoop-hdfs-secondarynamenode.svc
+Source15: hadoop-mapreduce-historyserver.svc
+Source16: hadoop-yarn-resourcemanager.svc
+Source17: hadoop-yarn-nodemanager.svc
+Source18: hadoop-httpfs.svc
+Source19: hadoop-mapreduce-historyserver.default
Buildroot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id} -u -n)
BuildRequires: python >= 2.4, git, fuse-devel,fuse, automake, autoconf
Requires: coreutils, /usr/sbin/useradd, /usr/sbin/usermod, /sbin/chkconfig,
/sbin/service, bigtop-utils
@@ -378,51 +384,25 @@ bash %{SOURCE2} \
# Init.d scripts
%__install -d -m 0755 $RPM_BUILD_ROOT/%{initd_dir}/
-
-%if %{?suse_version:1}0
-orig_init_file=$RPM_SOURCE_DIR/hadoop-init.tmpl.suse
-%else
-orig_init_file=$RPM_SOURCE_DIR/hadoop-init.tmpl
-%endif
-
-yarn_orig_init_file=$RPM_SOURCE_DIR/yarn-init.tmpl
-httpfs_orig_init_file=$RPM_SOURCE_DIR/service-hadoop-httpfs
-
# Generate the init.d scripts
-for service in %{hdfs_services} %{mapreduce_services}
+for service in %{hadoop_services}
do
init_file=$RPM_BUILD_ROOT/%{initd_dir}/%{name}-${service}
- %__cp $orig_init_file $init_file
- %__sed -i -e 's|@HADOOP_COMMON_ROOT@|%{lib_hadoop}|' $init_file
- %__sed -i -e "s|@HADOOP_DAEMON@|${service#*-}|" $init_file
- %__sed -i -e 's|@HADOOP_CONF_DIR@|%{config_hadoop}|' $init_file
- %__sed -i -e 's|@HADOOP_DAEMON_USER@|hdfs|' $init_file
+ bash $RPM_SOURCE_DIR/init.d.tmpl $RPM_SOURCE_DIR/%{name}-${service}.svc > $init_file
chmod 755 $init_file
done
-for service in %{yarn_services}
-do
- init_file=$RPM_BUILD_ROOT/%{initd_dir}/%{name}-${service}
- %__cp $yarn_orig_init_file $init_file
- %__sed -i -e 's|@YARN_COMMON_ROOT@|%{lib_hadoop}|' $init_file
- %__sed -i -e "s|@YARN_DAEMON@|${service#yarn-}|" $init_file
- %__sed -i -e 's|@YARN_CONF_DIR@|%{config_hadoop}|' $init_file
- %__sed -i -e 's|@YARN_DAEMON_USER@|yarn|' $init_file
- chmod 755 $init_file
-done
-%__cp $httpfs_orig_init_file $RPM_BUILD_ROOT/%{initd_dir}/%{name}-httpfs
-chmod 755 $RPM_BUILD_ROOT/%{initd_dir}/%{name}-httpfs
-
%__install -d -m 0755 $RPM_BUILD_ROOT/etc/default
%__cp $RPM_SOURCE_DIR/hadoop.default $RPM_BUILD_ROOT/etc/default/hadoop
%__cp $RPM_SOURCE_DIR/yarn.default $RPM_BUILD_ROOT/etc/default/yarn
%__cp $RPM_SOURCE_DIR/%{name}-fuse.default $RPM_BUILD_ROOT/etc/default/%{name}-fuse
%__cp $RPM_SOURCE_DIR/%{name}-httpfs.default $RPM_BUILD_ROOT/etc/default/%{name}-httpfs
+%__cp $RPM_SOURCE_DIR/%{name}-mapreduce-historyserver.default $RPM_BUILD_ROOT/etc/default/%{name}-mapreduce-historyserver
%__install -d -m 0755 $RPM_BUILD_ROOT/etc/security/limits.d
-%__install -m 0644 %{SOURCE9} $RPM_BUILD_ROOT/etc/security/limits.d/hdfs.conf
-%__install -m 0644 %{SOURCE13} $RPM_BUILD_ROOT/etc/security/limits.d/yarn.conf
-%__install -m 0644 %{SOURCE14} $RPM_BUILD_ROOT/etc/security/limits.d/mapreduce.conf
+%__install -m 0644 %{SOURCE8} $RPM_BUILD_ROOT/etc/security/limits.d/hdfs.conf
+%__install -m 0644 %{SOURCE9} $RPM_BUILD_ROOT/etc/security/limits.d/yarn.conf
+%__install -m 0644 %{SOURCE10} $RPM_BUILD_ROOT/etc/security/limits.d/mapreduce.conf
# /var/lib/*/cache
%__install -d -m 1777 $RPM_BUILD_ROOT/%{state_hadoop}/cache
@@ -490,7 +470,7 @@ if [ "$1" = 0 ]; then
fi
%preun httpfs
-%{alternatives_cmd} --remove %{name}-httpfs-conf %{etc_httpfs}/conf.empty 10
+%{alternatives_cmd} --remove %{name}-httpfs-conf %{etc_httpfs}/conf.empty || :
if [ $1 = 0 ]; then
service %{name}-httpfs stop > /dev/null 2>&1
chkconfig --del %{name}-httpfs
@@ -551,11 +531,16 @@ fi
%files mapreduce
%defattr(-,root,root)
+%config(noreplace) /etc/default/hadoop-mapreduce-historyserver
%config(noreplace) /etc/security/limits.d/mapreduce.conf
%{lib_hadoop}/hadoop-mapreduce*.jar
%{lib_hadoop}/hadoop-streaming*.jar
+%{lib_hadoop}/hadoop-extras*.jar
+%{lib_hadoop}/hadoop-distcp*.jar
+%{lib_hadoop}/hadoop-rumen*.jar
%{lib_hadoop}/libexec/mapred-config.sh
%{lib_hadoop}/bin/mapred
+%{lib_hadoop}/sbin/mr-jobhistory-daemon.sh
%{bin_hadoop}/mapred
%attr(0775,mapreduce,hadoop) %{run_mapreduce}
%attr(0775,mapreduce,hadoop) %{log_mapreduce}
@@ -565,6 +550,7 @@ fi
%files
%defattr(-,root,root)
+%config(noreplace) %{etc_hadoop}/conf.empty/core-site.xml
%config(noreplace) %{etc_hadoop}/conf.empty/hadoop-metrics.properties
%config(noreplace) %{etc_hadoop}/conf.empty/hadoop-metrics2.properties
%config(noreplace) %{etc_hadoop}/conf.empty/log4j.properties