Author: apurtell
Date: Fri Dec 18 23:28:13 2009
New Revision: 892400
URL: http://svn.apache.org/viewvc?rev=892400&view=rev
Log:
HBASE-2032 Support for installation of user packages
Modified:
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-env.sh
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-init-remote.sh
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/image/create-hbase-image-remote
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-master
    hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-slaves
Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-env.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-env.sh?rev=892400&r1=892399&r2=892400&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-env.sh (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-env.sh Fri Dec 18 23:28:13 2009
@@ -48,6 +48,12 @@
# Enable public access web interfaces
ENABLE_WEB_PORTS=false
+# Extra packages
+# Allows you to add a private Yum repo and pull packages from it as your
+# instances boot up. Format is <repo-descriptor-URL> <pkg1> ... <pkgN>
+# The repository descriptor will be fetched into /etc/yum.repos.d.
+EXTRA_PACKAGES=
+
# Use only c1.xlarge unless you know what you are doing
MASTER_INSTANCE_TYPE=${MASTER_INSTANCE_TYPE:-c1.xlarge}
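
Usage sketch for the new setting (the repo URL and package names below are placeholders, not part of the commit): EXTRA_PACKAGES takes the repo descriptor URL first, followed by the packages to pull from that repo, e.g.

    # hbase-ec2-env.sh -- hypothetical example values
    EXTRA_PACKAGES="http://repo.example.com/custom.repo lzo lzo-devel"
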
Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-init-remote.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-init-remote.sh?rev=892400&r1=892399&r2=892400&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-init-remote.sh (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/hbase-ec2-init-remote.sh Fri Dec 18 23:28:13 2009
@@ -5,8 +5,9 @@
# data, so should not exceed 16K in size.
###############################################################################
-MASTER_HOST=%MASTER_HOST%
-ZOOKEEPER_QUORUM=%ZOOKEEPER_QUORUM%
+MASTER_HOST="%MASTER_HOST%"
+ZOOKEEPER_QUORUM="%ZOOKEEPER_QUORUM%"
+EXTRA_PACKAGES="%EXTRA_PACKAGES%"
SECURITY_GROUPS=`wget -q -O - http://169.254.169.254/latest/meta-data/security-groups`
IS_MASTER=`echo $SECURITY_GROUPS | awk '{ a = match ($0, "-master$"); if (a) print "true"; else print "false"; }'`
if [ "$IS_MASTER" = "true" ]; then
@@ -17,9 +18,7 @@
HBASE_HOME=`ls -d /usr/local/hbase-*`
HBASE_VERSION=`echo $HBASE_HOME | cut -d '-' -f 2`
-###############################################################################
# Hadoop configuration
-###############################################################################
cat > $HADOOP_HOME/conf/hadoop-site.xml <<EOF
<?xml version="1.0"?>
@@ -62,7 +61,6 @@
EOF
# Configure Hadoop for Ganglia
-# overwrite hadoop-metrics.properties
cat > $HADOOP_HOME/conf/hadoop-metrics.properties <<EOF
dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
dfs.period=10
@@ -75,9 +73,7 @@
mapred.servers=$MASTER_HOST:8649
EOF
-###############################################################################
# HBase configuration
-###############################################################################
cat > $HBASE_HOME/conf/hbase-site.xml <<EOF
<?xml version="1.0"?>
@@ -133,7 +129,6 @@
EOF
# Configure HBase for Ganglia
-# overwrite hadoop-metrics.properties
cat > $HBASE_HOME/conf/hadoop-metrics.properties <<EOF
dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
dfs.period=10
@@ -146,26 +141,30 @@
jvm.servers=$MASTER_HOST:8649
EOF
-###############################################################################
# Start services
-###############################################################################
# up open file descriptor limits
echo "root soft nofile 32768" >> /etc/security/limits.conf
echo "root hard nofile 32768" >> /etc/security/limits.conf
-# up epoll limits
-# ok if this fails, only valid for kernels 2.6.27+
+# up epoll limits, only valid for kernels 2.6.27+
sysctl -w fs.epoll.max_user_instances=32768 > /dev/null 2>&1
-mkdir -p /mnt/hadoop/logs
-mkdir -p /mnt/hbase/logs
+mkdir -p /mnt/hadoop/logs /mnt/hbase/logs
[ ! -f /etc/hosts ] && echo "127.0.0.1 localhost" > /etc/hosts
-# not set on boot
export USER="root"
+if [ "$EXTRA_PACKAGES" != "" ] ; then
+ # format should be <repo-descriptor-URL> <package1> ... <packageN>
+ # this will only work with bash
+ pkg=( $EXTRA_PACKAGES )
+ wget -nv -O /etc/yum.repos.d/user.repo ${pkg[0]}
+ yum -y update yum
+ yum -y install ${pkg[@]:1}
+fi
+
if [ "$IS_MASTER" = "true" ]; then
# MASTER
# Prep Ganglia
@@ -185,13 +184,9 @@
[ ! -e /mnt/hadoop/dfs ] && "$HADOOP_HOME"/bin/hadoop namenode -format
"$HADOOP_HOME"/bin/hadoop-daemon.sh start namenode
-
"$HADOOP_HOME"/bin/hadoop-daemon.sh start datanode
-
"$HADOOP_HOME"/bin/hadoop-daemon.sh start jobtracker
-
sleep 10
-
"$HBASE_HOME"/bin/hbase-daemon.sh start master
else
@@ -204,11 +199,8 @@
-e "s|\(udp_send_channel {\)|\1\n host=$MASTER_HOST|" \
/etc/gmond.conf
service gmond start
-
"$HADOOP_HOME"/bin/hadoop-daemon.sh start datanode
-
"$HBASE_HOME"/bin/hbase-daemon.sh start regionserver
-
"$HADOOP_HOME"/bin/hadoop-daemon.sh start tasktracker
fi
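
For reference, the install block added above relies on bash word-splitting into an array: the first token of EXTRA_PACKAGES is the repo descriptor URL (fetched to /etc/yum.repos.d/user.repo) and the remaining tokens are the package names passed to yum. A minimal standalone sketch with placeholder values:

    EXTRA_PACKAGES="http://repo.example.com/custom.repo pkg-a pkg-b"
    pkg=( $EXTRA_PACKAGES )             # split on whitespace into a bash array
    echo "repo descriptor: ${pkg[0]}"   # http://repo.example.com/custom.repo
    echo "packages: ${pkg[@]:1}"        # pkg-a pkg-b
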
Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/image/create-hbase-image-remote
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/image/create-hbase-image-remote?rev=892400&r1=892399&r2=892400&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/image/create-hbase-image-remote (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/image/create-hbase-image-remote Fri Dec 18 23:28:13 2009
@@ -73,7 +73,7 @@
# Configure HBase
sed -i \
-e "s|# export JAVA_HOME=.*|export JAVA_HOME=/usr/local/jdk${JAVA_VERSION}|"
\
- -e 's|# export HBASE_OPTS=.*|export HBASE_OPTS="$HBASE_OPTS -server -XX:+UseConcMarkSweepGC -XX:+DoEscapeAnalysis -XX:+AggressiveOpts"|' \
+ -e 's|# export HBASE_OPTS=.*|export HBASE_OPTS="$HBASE_OPTS -server -XX:+HeapDumpOnOutOfMemoryError -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=90 -XX:NewSize=64m -XX:MaxNewSize=64m -XX:+DoEscapeAnalysis -XX:+AggressiveOpts"|' \
-e 's|# export HBASE_LOG_DIR=.*|export HBASE_LOG_DIR=/mnt/hbase/logs|' \
-e 's|# export HBASE_SLAVE_SLEEP=.*|export HBASE_SLAVE_SLEEP=1|' \
/usr/local/hbase-$HBASE_VERSION/conf/hbase-env.sh
@@ -94,12 +94,12 @@
# Ensure logging in to new hosts is seamless.
echo ' StrictHostKeyChecking no' >> /etc/ssh/ssh_config
-# Install LZO for HBase
-echo "Installing LZO codec support for HBase"
-cd /usr/local/hbase-${HBASE_VERSION}
-wget -nv http://iridiant.s3.amazonaws.com/hbase/lzo-linux-${HADOOP_VERSION}.tar.gz
-tar xzf lzo-linux-${HADOOP_VERSION}.tar.gz
-rm lzo-linux-${HADOOP_VERSION}.tar.gz
+# Install LZO
+echo "Installing LZO codec support"
+wget -nv -O /tmp/lzo-linux-${HADOOP_VERSION}.tar.gz http://iridiant.s3.amazonaws.com/hbase/lzo-linux-${HADOOP_VERSION}.tar.gz
+cd /usr/local/hadoop-${HADOOP_VERSION} && tar xzf /tmp/lzo-linux-${HADOOP_VERSION}.tar.gz
+cd /usr/local/hbase-${HBASE_VERSION} && tar xzf /tmp/lzo-linux-${HADOOP_VERSION}.tar.gz
+rm -f /tmp/lzo-linux-${HADOOP_VERSION}.tar.gz
# Bundle and upload image
cd ~root
Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-master
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-master?rev=892400&r1=892399&r2=892400&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-master (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-master Fri Dec 18 23:28:13 2009
@@ -59,6 +59,7 @@
# Substituting zookeeper quorum
ZOOKEEPER_QUORUM=`cat $ZOOKEEPER_QUORUM_PATH`
sed -e "s|%ZOOKEEPER_QUORUM%|$ZOOKEEPER_QUORUM|" \
+ -e "s|%EXTRA_PACKAGES%|$EXTRA_PACKAGES|" \
"$bin"/$USER_DATA_FILE > "$bin"/$USER_DATA_FILE.master
INSTANCE=`ec2-run-instances $AMI_IMAGE $TOOL_OPTS -n 1 -g $CLUSTER_MASTER -k root -f "$bin"/$USER_DATA_FILE.master -t $type | grep INSTANCE | awk '{print $2}'`
echo -n "Waiting for instance $INSTANCE to start"
Modified: hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-slaves
URL: http://svn.apache.org/viewvc/hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-slaves?rev=892400&r1=892399&r2=892400&view=diff
==============================================================================
--- hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-slaves (original)
+++ hadoop/hbase/branches/0.20_on_hadoop-0.18.3/src/contrib/ec2/bin/launch-hbase-slaves Fri Dec 18 23:28:13 2009
@@ -50,6 +50,7 @@
# Substituting master hostname and zookeeper quorum
sed -e "s|%MASTER_HOST%|$MASTER_HOST|" \
-e "s|%ZOOKEEPER_QUORUM%|$ZOOKEEPER_QUORUM|" \
+ -e "s|%EXTRA_PACKAGES%|$EXTRA_PACKAGES|" \
"$bin"/$USER_DATA_FILE > "$bin"/$USER_DATA_FILE.slave
# Start slaves
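
The launch scripts hand the setting to the instances through the user-data template: sed replaces the %EXTRA_PACKAGES% placeholder in hbase-ec2-init-remote.sh before the file is passed to ec2-run-instances. A minimal sketch of that substitution (values and output file name are placeholders):

    EXTRA_PACKAGES="http://repo.example.com/custom.repo pkg-a"
    sed -e "s|%EXTRA_PACKAGES%|$EXTRA_PACKAGES|" hbase-ec2-init-remote.sh > user-data.slave
    # user-data.slave now reads: EXTRA_PACKAGES="http://repo.example.com/custom.repo pkg-a"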