This is an automated email from the ASF dual-hosted git repository.
sekikn pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/bigtop.git
The following commit(s) were added to refs/heads/master by this push:
new d8271c9 BIGTOP-3506. Update hadoop-httpfs resources based on hadoop-3. (#740)
d8271c9 is described below
commit d8271c900cc7a85ff03b438f27a05fd123b7c4d9
Author: Masatake Iwasaki <[email protected]>
AuthorDate: Wed Feb 24 16:35:30 2021 +0900
BIGTOP-3506. Update hadoop-httpfs resources based on hadoop-3. (#740)
* BIGTOP-3506. Update hadoop-httpfs resources based on hadoop-3.
* fixed typo and directory owner settings.
---
.../puppet/modules/hadoop/manifests/init.pp | 8 ++--
.../puppet/modules/hadoop/templates/httpfs-env.sh | 42 ++++++++++++------
.../src/common/hadoop/hadoop-httpfs.svc | 50 +++++++---------------
bigtop-packages/src/common/hadoop/httpfs.default | 14 ++----
.../src/common/hadoop/install_hadoop.sh | 11 +----
.../src/common/hadoop/patch6-fix-httpfs-sh.diff | 15 +++++++
bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs | 1 -
.../src/deb/hadoop/hadoop-httpfs.install | 6 ++-
.../src/deb/hadoop/hadoop-httpfs.postinst | 2 +-
bigtop-packages/src/deb/hadoop/rules | 1 -
bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec | 8 ++--
11 files changed, 80 insertions(+), 78 deletions(-)
diff --git a/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp b/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp
index 6cd2187..d62acfe 100644
--- a/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp
+++ b/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp
@@ -485,12 +485,12 @@ class hadoop ($hadoop_security_authentication = "simple",
require => Package["jdk"],
}
- file { "/etc/hadoop-httpfs/conf/httpfs-site.xml":
+ file { "/etc/hadoop/conf/httpfs-site.xml":
content => template('hadoop/httpfs-site.xml'),
require => [Package["hadoop-httpfs"]],
}
- file { "/etc/hadoop-httpfs/conf/httpfs-env.sh":
+ file { "/etc/hadoop/conf/httpfs-env.sh":
content => template('hadoop/httpfs-env.sh'),
require => [Package["hadoop-httpfs"]],
}
@@ -504,7 +504,7 @@ class hadoop ($hadoop_security_authentication = "simple",
fail("HTTPFS signature secret must be set")
}
- file { "/etc/hadoop-httpfs/conf/httpfs-signature.secret":
+ file { "/etc/hadoop/conf/httpfs-signature.secret":
content => $httpfs_signature_secret,
# it's a password file - do not filebucket
backup => false,
@@ -514,7 +514,7 @@ class hadoop ($hadoop_security_authentication = "simple",
service { "hadoop-httpfs":
ensure => running,
hasstatus => true,
- subscribe => [Package["hadoop-httpfs"],
File["/etc/hadoop-httpfs/conf/httpfs-site.xml"],
File["/etc/hadoop-httpfs/conf/httpfs-env.sh"],
File["/etc/hadoop-httpfs/conf/httpfs-signature.secret"],
+ subscribe => [Package["hadoop-httpfs"],
File["/etc/hadoop/conf/httpfs-site.xml"],
File["/etc/hadoop/conf/httpfs-env.sh"],
File["/etc/hadoop/conf/httpfs-signature.secret"],
File["/etc/hadoop/conf/core-site.xml"],
File["/etc/hadoop/conf/hdfs-site.xml"]],
require => [ Package["hadoop-httpfs"] ],
}
diff --git a/bigtop-deploy/puppet/modules/hadoop/templates/httpfs-env.sh b/bigtop-deploy/puppet/modules/hadoop/templates/httpfs-env.sh
index 1de1114..60aa31c 100644
--- a/bigtop-deploy/puppet/modules/hadoop/templates/httpfs-env.sh
+++ b/bigtop-deploy/puppet/modules/hadoop/templates/httpfs-env.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,28 +14,46 @@
#
# Set httpfs specific environment variables here.
+#
+# hadoop-env.sh is read prior to this file.
+#
-# Settings for the Embedded Tomcat that runs HttpFS
-# Java System properties for HttpFS should be specified in this variable
+# HTTPFS config directory
#
-# export CATALINA_OPTS=
+# export HTTPFS_CONFIG=${HADOOP_CONF_DIR}
-# HttpFS logs directory
+# HTTPFS log directory
#
-# export HTTPFS_LOG=${HTTPFS_HOME}/logs
+# export HTTPFS_LOG=${HADOOP_LOG_DIR}
-# HttpFS temporary directory
+# HTTPFS temporary directory
#
-# export HTTPFS_TEMP=${HTTPFS_HOME}/temp
+# export HTTPFS_TEMP=${HADOOP_HDFS_HOME}/temp
-# The HTTP port used by HttpFS
+# The HTTP port used by HTTPFS
#
export HTTPFS_HTTP_PORT=<%= @hadoop_httpfs_port %>
-# The Admin port used by HttpFS
+# The maximum number of HTTP handler threads
#
-# export HTTPFS_ADMIN_PORT=`expr ${HTTPFS_HTTP_PORT} + 1`
+# export HTTPFS_MAX_THREADS=1000
# The hostname HttpFS server runs on
#
-# export HTTPFS_HTTP_HOSTNAME=`hostname -f`
+# export HTTPFS_HTTP_HOSTNAME=$(hostname -f)
+
+# The maximum size of HTTP header
+#
+# export HTTPFS_MAX_HTTP_HEADER_SIZE=65536
+
+# Whether SSL is enabled
+#
+# export HTTPFS_SSL_ENABLED=false
+
+# The location of the SSL keystore if using SSL
+#
+# export HTTPFS_SSL_KEYSTORE_FILE=${HOME}/.keystore
+
+# The password of the SSL keystore if using SSL
+#
+# export HTTPFS_SSL_KEYSTORE_PASS=password
diff --git a/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc b/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc
index 69e6b41..7373202 100644
--- a/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc
+++ b/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc
@@ -15,11 +15,11 @@
HADOOP_NAME="httpfs"
DAEMON="hadoop-$HADOOP_NAME"
DESC="Hadoop $HADOOP_NAME"
-EXEC_PATH="/usr/lib/hadoop-httpfs/sbin/httpfs.sh"
+EXEC_PATH="/usr/lib/hadoop/sbin/httpfs.sh"
SVC_USER="$HADOOP_NAME"
WORKING_DIR="/var/run/hadoop-httpfs"
DAEMON_FLAGS="$HADOOP_NAME"
-CONF_DIR="/etc/$DAEMON/conf"
+CONF_DIR="/etc/hadoop/conf"
PIDFILE="/var/run/$DAEMON/hadoop-$SVC_USER-$HADOOP_NAME.pid"
CHKCONFIG=${CHKCONFIG:-"2345 90 10"}
@@ -28,42 +28,27 @@ CHKCONFIG="${CHKCONFIG%% *} 90 10"
generate_start() {
cat <<'__EOT__'
+
start() {
[ -x $EXEC_PATH ] || exit $ERROR_PROGRAM_NOT_INSTALLED
[ -d $CONF_DIR ] || exit $ERROR_PROGRAM_NOT_CONFIGURED
- export HTTPFS_USER="$SVC_USER"
- export HTTPFS_CONFIG="$CONF_DIR"
- export HTTPFS_LOG=${HTTPFS_LOG:-"/var/log/hadoop-httpfs/"}
- export HTTPFS_TEMP="$(dirname $PIDFILE)"
- export HTTPFS_SLEEP_TIME="$SLEEP_TIME"
- export CATALINA_BASE=${CATALINA_BASE:-"/var/lib/hadoop-httpfs/tomcat-deployment"}
- export CATALINA_PID="$PIDFILE"
- export CATALINA_TMPDIR="$HTTPFS_TEMP"
-
- . /usr/lib/hadoop-httpfs/tomcat-deployment.sh
-
- runuser -s /bin/bash -c "${EXEC_PATH} start $DAEMON_FLAGS" $HTTPFS_USER
+ runuser -s /bin/bash $SVC_USER -c "cd $WORKING_DIR && $EXEC_PATH start $DAEMON_FLAGS"
- for second in {5..0}
- do
- checkstatusofproc
- RETVAL=$?
- if [ "$RETVAL" -eq $RETVAL_SUCCESS ] ; then
- break
- fi
- sleep 1
- done
+ # Some processes are slow to start
+ sleep $SLEEP_TIME
+ checkstatusofproc
+ RETVAL=$?
if [ $RETVAL -eq $STATUS_RUNNING ]; then
touch $LOCKFILE
- log_success_msg "Started ${DESC} (${DAEMON}): "
+ log_success_msg "Started ${DESC}: "
else
log_failure_msg "Failed to start ${DESC}. Return value: $RETVAL"
fi
-
return $RETVAL
}
+
__EOT__
}
@@ -71,24 +56,19 @@ __EOT__
generate_stop() {
cat <<'__EOT__'
-stop() {
- # FIXME: workaround for BIGTOP-537
- checkstatusofproc
- if [ "$?" = "$STATUS_RUNNING" ] ; then
- runuser -s /bin/bash $SVC_USER -c "${EXEC_PATH} stop $SLEEP_TIME -force"
- RETVAL=$?
- else
- RETVAL=$RETVAL_SUCCESS
- fi
+stop() {
+ start_daemon $EXEC_PATH stop $DAEMON_FLAGS
+ RETVAL=$?
if [ $RETVAL -eq $RETVAL_SUCCESS ]; then
log_success_msg "Stopped ${DESC}: "
rm -f $LOCKFILE $PIDFILE
else
- log_failure_msg "Failure to stop ${DESC}. Return value: $RETVAL"
+ log_failure_msg "Failed to stop ${DESC}. Return value: $RETVAL."
fi
}
+
__EOT__
}
diff --git a/bigtop-packages/src/common/hadoop/httpfs.default b/bigtop-packages/src/common/hadoop/httpfs.default
index 86822c2..79aaa4e 100644
--- a/bigtop-packages/src/common/hadoop/httpfs.default
+++ b/bigtop-packages/src/common/hadoop/httpfs.default
@@ -12,13 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-export HTTPFS_USER=httpfs
-export HTTPFS_CONFIG=/etc/hadoop-httpfs/conf
-export HTTPFS_LOG=/var/log/hadoop-httpfs/
-export HTTPFS_TEMP=/var/run/hadoop-httpfs/
-export HTTPFS_CATALINA_HOME=/usr/lib/bigtop-tomcat
-export CATALINA_PID=/var/run/hadoop-httpfs/hadoop-httpfs-httpfs.pid
-export CATALINA_BASE=/var/lib/hadoop-httpfs/tomcat-deployment
-export CATALINA_TMPDIR=/var/run/hadoop-httpfs/
-# HTTPFS_HTTP_PORT
-# HTTPFS_ADMIN_PORT
+export HADOOP_CONF_DIR=/etc/hadoop/conf
+export HADOOP_LOG_DIR=/var/log/hadoop-httpfs
+export HADOOP_PID_DIR=/var/run/hadoop-httpfs
+export HTTPFS_TEMP=/var/lib/hadoop-httpfs/temp
diff --git a/bigtop-packages/src/common/hadoop/install_hadoop.sh b/bigtop-packages/src/common/hadoop/install_hadoop.sh
index b79e82f..7ab5505 100755
--- a/bigtop-packages/src/common/hadoop/install_hadoop.sh
+++ b/bigtop-packages/src/common/hadoop/install_hadoop.sh
@@ -40,7 +40,6 @@ OPTS=$(getopt \
-l 'native-build-string:' \
-l 'installed-lib-dir:' \
-l 'hadoop-dir:' \
- -l 'httpfs-dir:' \
-l 'hdfs-dir:' \
-l 'yarn-dir:' \
-l 'mapreduce-dir:' \
@@ -68,9 +67,6 @@ while true ; do
--distro-dir)
DISTRO_DIR=$2 ; shift 2
;;
- --httpfs-dir)
- HTTPFS_DIR=$2 ; shift 2
- ;;
--hadoop-dir)
HADOOP_DIR=$2 ; shift 2
;;
@@ -139,7 +135,6 @@ HDFS_DIR=${HDFS_DIR:-$PREFIX/usr/lib/hadoop-hdfs}
YARN_DIR=${YARN_DIR:-$PREFIX/usr/lib/hadoop-yarn}
MAPREDUCE_DIR=${MAPREDUCE_DIR:-$PREFIX/usr/lib/hadoop-mapreduce}
CLIENT_DIR=${CLIENT_DIR:-$PREFIX/usr/lib/hadoop/client}
-HTTPFS_DIR=${HTTPFS_DIR:-$PREFIX/usr/lib/hadoop-httpfs}
SYSTEM_LIB_DIR=${SYSTEM_LIB_DIR:-/usr/lib}
BIN_DIR=${BIN_DIR:-$PREFIX/usr/bin}
DOC_DIR=${DOC_DIR:-$PREFIX/usr/share/doc/hadoop}
@@ -231,7 +226,7 @@ cp -a ${BUILD_DIR}/bin/mapred ${YARN_DIR}/bin
# sbin
install -d -m 0755 ${HADOOP_DIR}/sbin
-cp -a ${BUILD_DIR}/sbin/{hadoop-daemon,hadoop-daemons,workers,kms}.sh ${HADOOP_DIR}/sbin
+cp -a ${BUILD_DIR}/sbin/{hadoop-daemon,hadoop-daemons,workers,httpfs,kms}.sh ${HADOOP_DIR}/sbin
install -d -m 0755 ${HDFS_DIR}/sbin
cp -a ${BUILD_DIR}/sbin/{distribute-exclude,refresh-namenodes}.sh ${HDFS_DIR}/sbin
install -d -m 0755 ${YARN_DIR}/sbin
@@ -320,8 +315,6 @@ for manpage in hadoop hdfs yarn mapred; do
done
# HTTPFS
-install -d -m 0755 ${HTTPFS_DIR}/sbin
-cp ${BUILD_DIR}/sbin/httpfs.sh ${HTTPFS_DIR}/sbin/
install -d -m 0755 ${PREFIX}/var/lib/hadoop-httpfs
# KMS
@@ -355,7 +348,7 @@ install -d -m 0755 $PREFIX/var/{log,run,lib}/hadoop-yarn
install -d -m 0755 $PREFIX/var/{log,run,lib}/hadoop-mapreduce
# Remove all source and create version-less symlinks to offer integration point with other projects
-for DIR in ${HADOOP_DIR} ${HDFS_DIR} ${YARN_DIR} ${MAPREDUCE_DIR} ${HTTPFS_DIR} ; do
+for DIR in ${HADOOP_DIR} ${HDFS_DIR} ${YARN_DIR} ${MAPREDUCE_DIR} ; do
(cd $DIR &&
rm -fv *-sources.jar
rm -fv lib/hadoop-*.jar
diff --git a/bigtop-packages/src/common/hadoop/patch6-fix-httpfs-sh.diff b/bigtop-packages/src/common/hadoop/patch6-fix-httpfs-sh.diff
new file mode 100644
index 0000000..38b27c9
--- /dev/null
+++ b/bigtop-packages/src/common/hadoop/patch6-fix-httpfs-sh.diff
@@ -0,0 +1,15 @@
+diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
+index 83e0b4382b9..56b451815f1 100755
+--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
++++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
+@@ -54,8 +54,8 @@ case $1 in
+ esac
+
+ # Locate bin
+-if [[ -n "${HADOOP_HOME}" ]]; then
+- bin="${HADOOP_HOME}/bin"
++if [[ -n "${HADOOP_HDFS_HOME}" ]]; then
++ bin="${HADOOP_HDFS_HOME}/bin"
+ else
+ sbin=$(cd -P -- "$(dirname -- "$0")" >/dev/null && pwd -P)
+ bin=$(cd -P -- "${sbin}/../bin" >/dev/null && pwd -P)
diff --git a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs
index 01b1af6..58d9b1e 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs
+++ b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs
@@ -1,2 +1 @@
-/usr/lib/hadoop-httpfs
/var/log/hadoop-httpfs
diff --git a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install
index eeb8db2..8619637 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install
+++ b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install
@@ -1,3 +1,5 @@
-/etc/default/hadoop-httpfs
-/usr/lib/hadoop-httpfs
+/etc/hadoop/conf.empty/httpfs-env.sh
+/etc/hadoop/conf.empty/httpfs-log4j.properties
+/etc/hadoop/conf.empty/httpfs-signature.secret
+/etc/hadoop/conf.empty/httpfs-site.xml
/var/lib/hadoop-httpfs
diff --git a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.postinst b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.postinst
index eeb5bcf..44eb1b4 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.postinst
+++ b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.postinst
@@ -21,7 +21,7 @@ set -e
case "$1" in
configure)
- chown httpfs:httpfs /var/run/hadoop-httpfs /var/log/hadoop-httpfs
+ chown httpfs:httpfs /var/run/hadoop-httpfs /var/log/hadoop-httpfs /var/lib/hadoop-httpfs
;;
abort-upgrade|abort-remove|abort-deconfigure)
diff --git a/bigtop-packages/src/deb/hadoop/rules b/bigtop-packages/src/deb/hadoop/rules
index de0a1f7..17e5987 100755
--- a/bigtop-packages/src/deb/hadoop/rules
+++ b/bigtop-packages/src/deb/hadoop/rules
@@ -57,7 +57,6 @@ override_dh_auto_install:
--prefix=debian/tmp/ \
--distro-dir=debian \
--build-dir=${PWD}/build \
- --httpfs-dir=debian/tmp/usr/lib/hadoop-httpfs \
--system-lib-dir=debian/tmp/usr/lib/ \
--system-libexec-dir=debian/tmp/usr/lib/hadoop/libexec/ \
--system-include-dir=debian/tmp/usr/include \
diff --git a/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec b/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
index 729f4e3..107cf11 100644
--- a/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
+++ b/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
@@ -28,7 +28,6 @@
%define config_yarn %{etc_yarn}/conf
%define lib_hadoop_dirname /usr/lib
%define lib_hadoop %{lib_hadoop_dirname}/%{name}
-%define lib_httpfs %{lib_hadoop_dirname}/%{name}-httpfs
%define lib_hdfs %{lib_hadoop_dirname}/%{name}-hdfs
%define lib_yarn %{lib_hadoop_dirname}/%{name}-yarn
%define lib_mapreduce %{lib_hadoop_dirname}/%{name}-mapreduce
@@ -509,7 +508,6 @@ bash %{SOURCE1}
env HADOOP_VERSION=%{hadoop_base_version} bash %{SOURCE2} \
--distro-dir=$RPM_SOURCE_DIR \
--build-dir=$PWD/build \
- --httpfs-dir=$RPM_BUILD_ROOT%{lib_httpfs} \
--system-include-dir=$RPM_BUILD_ROOT%{_includedir} \
--system-lib-dir=$RPM_BUILD_ROOT%{_libdir} \
--system-libexec-dir=$RPM_BUILD_ROOT/%{lib_hadoop}/libexec \
@@ -719,9 +717,13 @@ fi
%files httpfs
%defattr(-,root,root)
+
%config(noreplace) /etc/default/%{name}-httpfs
+%config(noreplace) %{etc_hadoop}/conf.empty/httpfs-env.sh
+%config(noreplace) %{etc_hadoop}/conf.empty/httpfs-log4j.properties
+%config(noreplace) %{etc_hadoop}/conf.empty/httpfs-signature.secret
+%config(noreplace) %{etc_hadoop}/conf.empty/httpfs-site.xml
%{initd_dir}/%{name}-httpfs
-%{lib_httpfs}
%attr(0775,httpfs,httpfs) %{run_httpfs}
%attr(0775,httpfs,httpfs) %{log_httpfs}
%attr(0775,httpfs,httpfs) %{state_httpfs}