Repository: hadoop
Updated Branches:
  refs/heads/HADOOP-12930 e1cae42ad -> d0ff3c336


HADOOP-13086. enable daemonization of dynamic commands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d0ff3c33
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d0ff3c33
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d0ff3c33

Branch: refs/heads/HADOOP-12930
Commit: d0ff3c3361946d1c9a35452c2edfce03cfa983b2
Parents: e1cae42
Author: Allen Wittenauer <a...@apache.org>
Authored: Wed May 4 10:13:18 2016 -0700
Committer: Allen Wittenauer <a...@apache.org>
Committed: Wed May 4 10:13:18 2016 -0700

----------------------------------------------------------------------
 .../hadoop-common/src/main/bin/hadoop           | 53 ++++++++++++++++++-
 .../hadoop-hdfs/src/main/bin/hdfs               | 36 ++++++-------
 hadoop-mapreduce-project/bin/mapred             | 49 +++++++++++------
 hadoop-yarn-project/hadoop-yarn/bin/yarn        | 55 ++++++++++++++------
 4 files changed, 142 insertions(+), 51 deletions(-)
----------------------------------------------------------------------
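
Context, for readers following the HADOOP-12930 dynamic subcommand work: a
third-party subcommand is declared in a shell profile by registering it and
defining a (scriptname)_subcommand_(name) function. This change replaces the
script-local lowercase flags (supportdaemonization, secure_service,
secure_user) with shared HADOOP_SUBCMD_* variables, so a dynamic subcommand
can opt into the same daemon handling as the built-in servers. A minimal
sketch, assuming those hook names (check hadoop-functions.sh on this branch
for the exact registration signature); the "fancyserver" subcommand, its
class, and the file path are hypothetical:

  # ${HADOOP_CONF_DIR}/shellprofile.d/fancyserver.sh  (hypothetical path)
  hadoop_add_subcommand "fancyserver" "run the (hypothetical) fancy server"

  function hadoop_subcommand_fancyserver
  {
    # Opting into daemonization is now just a matter of setting the shared
    # variable that bin/hadoop checks after hadoop_finalize (see diff below).
    HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
    HADOOP_CLASSNAME="org.example.FancyServer"   # hypothetical class
  }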


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d0ff3c33/hadoop-common-project/hadoop-common/src/main/bin/hadoop
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index bb4b041..7b18d22 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -207,6 +207,57 @@ fi
 hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
 HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
 
+if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+  HADOOP_SECURE_USER="${HADOOP_SUBCMD_SECUREUSER}"
+  hadoop_verify_secure_prereq
+  hadoop_setup_secure_service
+  priv_outfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  priv_errfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.err"
+  priv_pidfile="${HADOOP_PID_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+  daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+else
+  daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+fi
+
+if [[ "${HADOOP_DAEMON_MODE}" != "default" ]]; then
+  # shellcheck disable=SC2034
+  HADOOP_ROOT_LOGGER="${HADOOP_DAEMON_ROOT_LOGGER}"
+  if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+    # shellcheck disable=SC2034
+    HADOOP_LOGFILE="hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
+  else
+    # shellcheck disable=SC2034
+    HADOOP_LOGFILE="hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
+  fi
+fi
+
 hadoop_finalize
-hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "$@"
 
+if [[ -n "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
+  if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+    hadoop_secure_daemon_handler \
+      "${HADOOP_DAEMON_MODE}" \
+      "${HADOOP_SUBCMD}" \
+      "${HADOOP_CLASSNAME}" \
+      "${daemon_pidfile}" \
+      "${daemon_outfile}" \
+      "${priv_pidfile}" \
+      "${priv_outfile}" \
+      "${priv_errfile}" \
+      "$@"
+  else
+    hadoop_daemon_handler \
+      "${HADOOP_DAEMON_MODE}" \
+      "${HADOOP_SUBCMD}" \
+      "${HADOOP_CLASSNAME}" \
+      "${daemon_pidfile}" \
+      "${daemon_outfile}" \
+      "$@"
+  fi
+  exit $?
+else
+  # shellcheck disable=SC2086
+  hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "$@"
+fi
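
With the dispatch above, any subcommand that sets
HADOOP_SUBCMD_SUPPORTDAEMONIZATION can be driven through the generic
--daemon (start|status|stop) option just like the built-in daemons; for
example (the subcommand name is the hypothetical one from the sketch above):

  hadoop --daemon start fancyserver
  hadoop --daemon status fancyserver
  hadoop --daemon stop fancyserver

  # Non-secure pid/out files follow the naming set earlier in this block:
  #   ${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-fancyserver.pid
  #   ${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-fancyserver-${HOSTNAME}.out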

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d0ff3c33/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
index 310fb41..4c0b7fb 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
@@ -76,7 +76,7 @@ function hdfscmd_case
 
   case ${subcmd} in
     balancer)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       HADOOP_CLASSNAME=org.apache.hadoop.hdfs.server.balancer.Balancer
       hadoop_debug "Appending HADOOP_BALANCER_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_BALANCER_OPTS}"
@@ -91,12 +91,12 @@ function hdfscmd_case
       HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.CryptoAdmin
     ;;
     datanode)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       # Determine if we're starting a secure datanode, and
       # if so, redefine appropriate variables
       if [[ -n "${HADOOP_SECURE_DN_USER}" ]]; then
-        secure_service="true"
-        secure_user="${HADOOP_SECURE_DN_USER}"
+        HADOOP_SUBCMD_SECURESERVICE="true"
+        HADOOP_SUBCMD_SECUREUSER="${HADOOP_SECURE_DN_USER}"
 
         # backward compatibility
         HADOOP_SECURE_PID_DIR="${HADOOP_SECURE_PID_DIR:-$HADOOP_SECURE_DN_PID_DIR}"
@@ -161,7 +161,7 @@ function hdfscmd_case
       HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
     ;;
     journalnode)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       HADOOP_CLASSNAME='org.apache.hadoop.hdfs.qjournal.server.JournalNode'
       hadoop_debug "Appending HADOOP_JOURNALNODE_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_JOURNALNODE_OPTS}"
@@ -173,23 +173,23 @@ function hdfscmd_case
       HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.snapshot.LsSnapshottableDir
     ;;
     mover)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       HADOOP_CLASSNAME=org.apache.hadoop.hdfs.server.mover.Mover
       hadoop_debug "Appending HADOOP_MOVER_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_MOVER_OPTS}"
     ;;
     namenode)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       HADOOP_CLASSNAME='org.apache.hadoop.hdfs.server.namenode.NameNode'
       hadoop_debug "Appending HADOOP_NAMENODE_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_NAMENODE_OPTS}"
       hadoop_add_param HADOOP_OPTS hdfs.audit.logger "-Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER}"
     ;;
     nfs3)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       if [[ -n "${HADOOP_PRIVILEGED_NFS_USER}" ]]; then
-        secure_service="true"
-        secure_user="${HADOOP_PRIVILEGED_NFS_USER}"
+        HADOOP_SUBCMD_SECURESERVICE="true"
+        HADOOP_SUBCMD_SECUREUSER="${HADOOP_PRIVILEGED_NFS_USER}"
 
         # backward compatibility
         HADOOP_SECURE_PID_DIR="${HADOOP_SECURE_PID_DIR:-$HADOOP_SECURE_NFS3_PID_DIR}"
@@ -215,13 +215,13 @@ function hdfscmd_case
       HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer
     ;;
     portmap)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       HADOOP_CLASSNAME=org.apache.hadoop.portmap.Portmap
       hadoop_debug "Appending HADOOP_PORTMAP_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_PORTMAP_OPTS}"
     ;;
     secondarynamenode)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       HADOOP_CLASSNAME='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
       hadoop_debug "Appending HADOOP_SECONDARYNAMENODE_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_SECONDARYNAMENODE_OPTS}"
@@ -237,7 +237,7 @@ function hdfscmd_case
       HADOOP_CLASSNAME=org.apache.hadoop.util.VersionInfo
     ;;
     zkfc)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       HADOOP_CLASSNAME='org.apache.hadoop.hdfs.tools.DFSZKFailoverController'
       hadoop_debug "Appending HADOOP_ZKFC_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_ZKFC_OPTS}"
@@ -289,8 +289,8 @@ if [[ ${HADOOP_SLAVE_MODE} = true ]]; then
   exit $?
 fi
 
-if [[ -n "${secure_service}" ]]; then
-  HADOOP_SECURE_USER="${secure_user}"
+if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+  HADOOP_SECURE_USER="${HADOOP_SUBCMD_SECUREUSER}"
   hadoop_verify_secure_prereq
   hadoop_setup_secure_service
   priv_outfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
@@ -306,7 +306,7 @@ fi
 if [[ "${HADOOP_DAEMON_MODE}" != "default" ]]; then
   # shellcheck disable=SC2034
   HADOOP_ROOT_LOGGER="${HADOOP_DAEMON_ROOT_LOGGER}"
-  if [[ -n "${secure_service}" ]]; then
+  if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
     # shellcheck disable=SC2034
     HADOOP_LOGFILE="hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
   else
@@ -317,8 +317,8 @@ fi
 
 hadoop_finalize
 
-if [[ -n "${supportdaemonization}" ]]; then
-  if [[ -n "${secure_service}" ]]; then
+if [[ -n "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
+  if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
     hadoop_secure_daemon_handler \
     "${HADOOP_DAEMON_MODE}" "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}"\
     "${daemon_pidfile}" "${daemon_outfile}" \

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d0ff3c33/hadoop-mapreduce-project/bin/mapred
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/bin/mapred b/hadoop-mapreduce-project/bin/mapred
index 84f03e4..79ca779 100755
--- a/hadoop-mapreduce-project/bin/mapred
+++ b/hadoop-mapreduce-project/bin/mapred
@@ -67,7 +67,7 @@ function mapredcmd_case
       exit 0
     ;;
     historyserver)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       HADOOP_CLASSNAME=org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer
       hadoop_debug "Appending HADOOP_JOB_HISTORYSERVER_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_JOB_HISTORYSERVER_OPTS}"
@@ -154,9 +154,19 @@ if [[ ${HADOOP_SLAVE_MODE} = true ]]; then
   exit $?
 fi
 
-daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
-daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
-
+if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+  HADOOP_SECURE_USER="${HADOOP_SUBCMD_SECUREUSER}"
+  hadoop_verify_secure_prereq
+  hadoop_setup_secure_service
+  priv_outfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  priv_errfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.err"
+  priv_pidfile="${HADOOP_PID_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+  daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+else
+  daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+fi
 
 if [[  "${HADOOP_DAEMON_MODE}" != "default" ]]; then
   # shellcheck disable=SC2034
@@ -168,19 +178,28 @@ fi
 
 hadoop_finalize
 
-if [[ -n "${supportdaemonization}" ]]; then
-  # shellcheck disable=SC2154
-  if [[ -n "${secure_service}" ]]; then
-    # shellcheck disable=SC2154
-    hadoop_secure_daemon_handler "${HADOOP_DAEMON_MODE}" "${HADOOP_SUBCMD}"\
-      "${HADOOP_CLASSNAME}" "${daemon_pidfile}" "${daemon_outfile}" \
-      "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
+if [[ -n "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
+  if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+    hadoop_secure_daemon_handler \
+      "${HADOOP_DAEMON_MODE}" \
+      "${HADOOP_SUBCMD}" \
+      "${HADOOP_CLASSNAME}" \
+      "${daemon_pidfile}" \
+      "${daemon_outfile}" \
+      "${priv_pidfile}" \
+      "${priv_outfile}" \
+      "${priv_errfile}" \
+      "$@"
   else
-    hadoop_daemon_handler "${HADOOP_DAEMON_MODE}" "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" \
-      "${daemon_pidfile}" "${daemon_outfile}" "$@"
+    hadoop_daemon_handler \
+      "${HADOOP_DAEMON_MODE}" \
+      "${HADOOP_SUBCMD}" \
+      "${HADOOP_CLASSNAME}" \
+      "${daemon_pidfile}" \
+      "${daemon_outfile}" \
+      "$@"
   fi
   exit $?
 else
   hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "$@"
-fi
-
+fi
\ No newline at end of file
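
The secure path works the same way: mirroring the datanode and nfs3 cases in
bin/hdfs above, a dynamic subcommand can request privileged (jsvc-style)
startup by also setting the secure-service variables that the common block in
bin/hadoop now reads. A hedged sketch, with hypothetical names:

  function hadoop_subcommand_fancyserver
  {
    HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
    HADOOP_CLASSNAME="org.example.FancyServer"        # hypothetical class

    # FANCY_SECURE_USER is a made-up variable for illustration; the built-in
    # services use HADOOP_SECURE_DN_USER / HADOOP_PRIVILEGED_NFS_USER here.
    if [[ -n "${FANCY_SECURE_USER}" ]]; then
      HADOOP_SUBCMD_SECURESERVICE="true"
      HADOOP_SUBCMD_SECUREUSER="${FANCY_SECURE_USER}"
    fi
  }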

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d0ff3c33/hadoop-yarn-project/hadoop-yarn/bin/yarn
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn b/hadoop-yarn-project/hadoop-yarn/bin/yarn
index 7544b58..eb0fccd 100755
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn
@@ -101,7 +101,7 @@ function yarncmd_case
       HADOOP_OPTS="${HADOOP_OPTS} ${YARN_CLIENT_OPTS}"
     ;;
     historyserver)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       echo "DEPRECATED: Use of this command to start the timeline server is 
deprecated." 1>&2
       echo "Instead use the timelineserver command for it." 1>&2
       echo "Starting the History Server anyway..." 1>&2
@@ -118,7 +118,7 @@ function yarncmd_case
       HADOOP_OPTS="${HADOOP_OPTS} ${YARN_CLIENT_OPTS}"
     ;;
     nodemanager)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.nodemanager.NodeManager'
       hadoop_debug "Append YARN_NODEMANAGER_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${YARN_NODEMANAGER_OPTS}"
@@ -128,7 +128,7 @@ function yarncmd_case
       fi
     ;;
     proxyserver)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.webproxy.WebAppProxyServer'
       hadoop_debug "Append YARN_PROXYSERVER_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${YARN_PROXYSERVER_OPTS}"
@@ -144,7 +144,7 @@ function yarncmd_case
       HADOOP_OPTS="${HADOOP_OPTS} ${YARN_CLIENT_OPTS}"
     ;;
     resourcemanager)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.resourcemanager.ResourceManager'
       HADOOP_OPTS="${HADOOP_OPTS} ${YARN_RESOURCEMANAGER_OPTS}"
       hadoop_debug "Append YARN_RESOURCEMANAGER_OPTS onto HADOOP_OPTS"
@@ -165,13 +165,13 @@ function yarncmd_case
       HADOOP_OPTS="${HADOOP_OPTS} ${YARN_CLIENT_OPTS}"
     ;;
     sharedcachemanager)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.sharedcachemanager.SharedCacheManager'
       hadoop_debug "Append YARN_SHAREDCACHEMANAGER_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${YARN_SHAREDCACHEMANAGER_OPTS}"
     ;;
     timelineserver)
-      supportdaemonization="true"
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer'
       hadoop_debug "Append YARN_TIMELINESERVER_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${YARN_TIMELINESERVER_OPTS}"
@@ -267,8 +267,19 @@ if [[ ${HADOOP_SLAVE_MODE} = true ]]; then
   exit $?
 fi
 
-daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
-daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+  HADOOP_SECURE_USER="${HADOOP_SUBCMD_SECUREUSER}"
+  hadoop_verify_secure_prereq
+  hadoop_setup_secure_service
+  priv_outfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  priv_errfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.err"
+  priv_pidfile="${HADOOP_PID_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+  daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+else
+  daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+fi
 
 if [[  "${HADOOP_DAEMON_MODE}" != "default" ]]; then
   # shellcheck disable=SC2034
@@ -279,16 +290,26 @@ fi
 
 hadoop_finalize
 
-if [[ -n "${supportdaemonization}" ]]; then
-  # shellcheck disable=SC2154
-  if [[ -n "${secure_service}" ]]; then
-    # shellcheck disable=SC2154
-    hadoop_secure_daemon_handler "${HADOOP_DAEMON_MODE}" "${HADOOP_SUBCMD}" \
-      "${HADOOP_CLASSNAME}" "${daemon_pidfile}" "${daemon_outfile}" \
-      "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
+if [[ -n "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
+  if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+    hadoop_secure_daemon_handler \
+      "${HADOOP_DAEMON_MODE}" \
+      "${HADOOP_SUBCMD}" \
+      "${HADOOP_CLASSNAME}" \
+      "${daemon_pidfile}" \
+      "${daemon_outfile}" \
+      "${priv_pidfile}" \
+      "${priv_outfile}" \
+      "${priv_errfile}" \
+      "$@"
   else
-    hadoop_daemon_handler "${HADOOP_DAEMON_MODE}" "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" \
-      "${daemon_pidfile}" "${daemon_outfile}" "$@"
+    hadoop_daemon_handler \
+      "${HADOOP_DAEMON_MODE}" \
+      "${HADOOP_SUBCMD}" \
+      "${HADOOP_CLASSNAME}" \
+      "${daemon_pidfile}" \
+      "${daemon_outfile}" \
+      "$@"
   fi
   exit $?
 else

