[ https://issues.apache.org/jira/browse/AMBARI-7977?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Andrew Onischuk resolved AMBARI-7977.
-------------------------------------
    Resolution: Cannot Reproduce

> Webhcat fails to start on HDP 2.1
> ---------------------------------
>
>                 Key: AMBARI-7977
>                 URL: https://issues.apache.org/jira/browse/AMBARI-7977
>             Project: Ambari
>          Issue Type: Bug
>            Reporter: Andrew Onischuk
>            Assignee: Andrew Onischuk
>             Fix For: 1.7.0
>
>
> Using Ambari 1.7.0 and the HDP 2.1 stack with HDFS, YARN+MapReduce, Tez,
> Nagios, Ganglia, Hive, and ZooKeeper installed, WebHCat fails to start:
> [root@c6401 yum.repos.d]# ambari-server --hash  
> a7a96001384d917cb951e8a7784f975eab5a59f0
>     
>     
>     
>     stderr:  
>     
>     2014-10-24 22:48:09,228 - Error while executing command 'start':
>     Traceback (most recent call last):
>       File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 122, in execute
>         method(env)
>       File "/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py", line 38, in start
>         self.configure(env) # FOR SECURITY
>       File "/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py", line 33, in configure
>         webhcat()
>       File "/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py", line 156, in webhcat
>         mutable_configs = __inject_config_variables(mutable_configs)
>       File "/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py", line 46, in __inject_config_variables
>         if prop_value:
>       File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/config_dictionary.py", line 94, in __getattr__
>         raise Fail("Configuration parameter '"+self.name+"' was not found in configurations dictionary!")
>     Fail: Configuration parameter 'templeton.sqoop.archive' was not found in configurations dictionary!
>     
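The traceback pins down the mechanism: config_dictionary.py returns a placeholder object for a missing configuration key, and that placeholder's __getattr__ raises Fail on any attribute access. Under Python 2 old-style classes, even truth-testing routes through __getattr__ (via the implicit __nonzero__ lookup), which is why the guard 'if prop_value:' at webhcat.py line 46 itself throws. A minimal Python 2 sketch of that pattern follows; the class name and message mirror the trace, but the body is illustrative, not Ambari's actual config_dictionary.py source:

    # Minimal Python 2 reproduction of the failure mode implied by the
    # traceback above. Illustrative sketch, not Ambari's actual code.
    class Fail(Exception):
        pass

    class UnknownConfiguration:  # old-style class, as on Python 2.6
        """Placeholder returned for a configuration key that is absent."""
        def __init__(self, name):
            self.name = name

        def __getattr__(self, attr):
            # Old-style classes route every missing attribute lookup here,
            # including the implicit __nonzero__ lookup made by truth-testing,
            # so even 'if prop_value:' ends up raising.
            raise Fail("Configuration parameter '" + self.name +
                       "' was not found in configurations dictionary!")

    prop_value = UnknownConfiguration('templeton.sqoop.archive')
    if prop_value:  # raises Fail, matching webhcat.py line 46 in the trace
        pass

Note this reproduction is Python-2 specific: on Python 3, implicit special-method lookup bypasses __getattr__, so truth-testing would not raise.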
>     stdout:  
>     
>     2014-10-24 22:47:04,233 - Execute['mkdir -p 
> /var/lib/ambari-agent/data/tmp/AMBARI-artifacts/;     curl -kf -x "" --retry 
> 10     
> http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o 
> /var/lib/ambari-agent/data/tmp/AMBARI-artifacts//UnlimitedJCEPolicyJDK7.zip'] 
> {'environment': ..., 'not_if': 'test -e 
> /var/lib/ambari-agent/data/tmp/AMBARI-artifacts//UnlimitedJCEPolicyJDK7.zip', 
> 'ignore_failures': True, 'path': ['/bin', '/usr/bin/']}
>     2014-10-24 22:47:04,263 - Skipping Execute['mkdir -p 
> /var/lib/ambari-agent/data/tmp/AMBARI-artifacts/;     curl -kf -x "" --retry 
> 10     
> http://c6401.ambari.apache.org:8080/resources//UnlimitedJCEPolicyJDK7.zip -o 
> /var/lib/ambari-agent/data/tmp/AMBARI-artifacts//UnlimitedJCEPolicyJDK7.zip'] 
> due to not_if
>     2014-10-24 22:47:04,263 - Group['hadoop'] {'ignore_failures': False}
>     2014-10-24 22:47:04,264 - Modifying group hadoop
>     2014-10-24 22:47:04,307 - Group['nobody'] {'ignore_failures': False}
>     2014-10-24 22:47:04,308 - Modifying group nobody
>     2014-10-24 22:47:04,341 - Group['users'] {'ignore_failures': False}
>     2014-10-24 22:47:04,341 - Modifying group users
>     2014-10-24 22:47:04,377 - Group['nagios'] {'ignore_failures': False}
>     2014-10-24 22:47:04,377 - Modifying group nagios
>     2014-10-24 22:47:04,433 - User['nobody'] {'gid': 'hadoop', 
> 'ignore_failures': False, 'groups': [u'nobody']}
>     2014-10-24 22:47:04,433 - Modifying user nobody
>     2014-10-24 22:47:04,449 - User['hive'] {'gid': 'hadoop', 
> 'ignore_failures': False, 'groups': [u'hadoop']}
>     2014-10-24 22:47:04,450 - Modifying user hive
>     2014-10-24 22:47:04,462 - User['mapred'] {'gid': 'hadoop', 
> 'ignore_failures': False, 'groups': [u'hadoop']}
>     2014-10-24 22:47:04,462 - Modifying user mapred
>     2014-10-24 22:47:04,484 - User['nagios'] {'gid': 'nagios', 
> 'ignore_failures': False, 'groups': [u'hadoop']}
>     2014-10-24 22:47:04,485 - Modifying user nagios
>     2014-10-24 22:47:04,511 - User['ambari-qa'] {'gid': 'hadoop', 
> 'ignore_failures': False, 'groups': [u'users']}
>     2014-10-24 22:47:04,512 - Modifying user ambari-qa
>     2014-10-24 22:47:04,524 - User['zookeeper'] {'gid': 'hadoop', 
> 'ignore_failures': False, 'groups': [u'hadoop']}
>     2014-10-24 22:47:04,524 - Modifying user zookeeper
>     2014-10-24 22:47:04,538 - User['tez'] {'gid': 'hadoop', 
> 'ignore_failures': False, 'groups': [u'users']}
>     2014-10-24 22:47:04,538 - Modifying user tez
>     2014-10-24 22:47:04,550 - User['hdfs'] {'gid': 'hadoop', 
> 'ignore_failures': False, 'groups': [u'hadoop']}
>     2014-10-24 22:47:04,550 - Modifying user hdfs
>     2014-10-24 22:47:04,563 - User['yarn'] {'gid': 'hadoop', 
> 'ignore_failures': False, 'groups': [u'hadoop']}
>     2014-10-24 22:47:04,563 - Modifying user yarn
>     2014-10-24 22:47:04,577 - User['hcat'] {'gid': 'hadoop', 
> 'ignore_failures': False, 'groups': [u'hadoop']}
>     2014-10-24 22:47:04,578 - Modifying user hcat
>     2014-10-24 22:47:04,590 - 
> File['/var/lib/ambari-agent/data/tmp/changeUid.sh'] {'content': 
> StaticFile('changeToSecureUid.sh'), 'mode': 0555}
>     2014-10-24 22:47:04,593 - 
> Execute['/var/lib/ambari-agent/data/tmp/changeUid.sh ambari-qa 
> /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa
>  2>/dev/null'] {'not_if': 'test $(id -u ambari-qa) -gt 1000'}
>     2014-10-24 22:47:04,604 - Skipping 
> Execute['/var/lib/ambari-agent/data/tmp/changeUid.sh ambari-qa 
> /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa
>  2>/dev/null'] due to not_if
>     2014-10-24 22:47:04,605 - Directory['/etc/hadoop/conf.empty'] {'owner': 
> 'root', 'group': 'root', 'recursive': True}
>     2014-10-24 22:47:04,605 - Link['/etc/hadoop/conf'] {'not_if': 'ls 
> /etc/hadoop/conf', 'to': '/etc/hadoop/conf.empty'}
>     2014-10-24 22:47:04,617 - Skipping Link['/etc/hadoop/conf'] due to not_if
>     2014-10-24 22:47:04,630 - File['/etc/hadoop/conf/hadoop-env.sh'] 
> {'content': InlineTemplate(...), 'owner': 'hdfs'}
>     2014-10-24 22:47:04,643 - Execute['/bin/echo 0 > /selinux/enforce'] 
> {'only_if': 'test -f /selinux/enforce'}
>     2014-10-24 22:47:04,659 - Skipping Execute['/bin/echo 0 > 
> /selinux/enforce'] due to only_if
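For context on the 'Skipping ... due to not_if' and '... due to only_if' lines in this log: each Execute[...] entry is a resource_management resource whose guard command decides whether the body runs. A simplified sketch of the guard semantics visible here (not the library's actual implementation):

    # Simplified sketch of Execute guard semantics as seen in this log.
    # not_if: skip the command when the guard exits 0 (condition holds).
    # only_if: run the command only when the guard exits 0.
    import subprocess

    def execute(command, not_if=None, only_if=None):
        if not_if is not None and subprocess.call(not_if, shell=True) == 0:
            print "Skipping Execute[%r] due to not_if" % command
            return
        if only_if is not None and subprocess.call(only_if, shell=True) != 0:
            print "Skipping Execute[%r] due to only_if" % command
            return
        subprocess.check_call(command, shell=True)

    # e.g. skipped on hosts where /selinux/enforce does not exist:
    execute('/bin/echo 0 > /selinux/enforce', only_if='test -f /selinux/enforce')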
>     2014-10-24 22:47:04,660 - Execute['mkdir -p 
> /usr/lib/hadoop/lib/native/Linux-i386-32; ln -sf /usr/lib/libsnappy.so 
> /usr/lib/hadoop/lib/native/Linux-i386-32/libsnappy.so'] {}
>     2014-10-24 22:47:04,676 - Execute['mkdir -p 
> /usr/lib/hadoop/lib/native/Linux-amd64-64; ln -sf /usr/lib64/libsnappy.so 
> /usr/lib/hadoop/lib/native/Linux-amd64-64/libsnappy.so'] {}
>     2014-10-24 22:47:04,688 - Directory['/var/log/hadoop'] {'owner': 'root', 
> 'group': 'hadoop', 'mode': 0775, 'recursive': True}
>     2014-10-24 22:47:04,689 - Directory['/var/run/hadoop'] {'owner': 'root', 
> 'group': 'root', 'recursive': True}
>     2014-10-24 22:47:04,689 - Directory['/tmp/hadoop-hdfs'] {'owner': 'hdfs', 
> 'recursive': True}
>     2014-10-24 22:47:04,694 - 
> File['/etc/hadoop/conf/commons-logging.properties'] {'content': 
> Template('commons-logging.properties.j2'), 'owner': 'hdfs'}
>     2014-10-24 22:47:04,697 - File['/etc/hadoop/conf/health_check'] 
> {'content': Template('health_check-v2.j2'), 'owner': 'hdfs'}
>     2014-10-24 22:47:04,698 - File['/etc/hadoop/conf/log4j.properties'] 
> {'content': '...', 'owner': 'hdfs', 'group': 'hadoop', 'mode': 0644}
>     2014-10-24 22:47:04,702 - 
> File['/etc/hadoop/conf/hadoop-metrics2.properties'] {'content': 
> Template('hadoop-metrics2.properties.j2'), 'owner': 'hdfs'}
>     2014-10-24 22:47:04,703 - File['/etc/hadoop/conf/task-log4j.properties'] 
> {'content': StaticFile('task-log4j.properties'), 'mode': 0755}
>     2014-10-24 22:47:04,705 - File['/etc/hadoop/conf/configuration.xsl'] 
> {'owner': 'hdfs', 'group': 'hadoop'}
>     2014-10-24 22:47:04,814 - HdfsDirectory['/apps/webhcat'] 
> {'security_enabled': False, 'keytab': [EMPTY], 'conf_dir': 
> '/etc/hadoop/conf', 'hdfs_user': 'hdfs', 'kinit_path_local': 
> '/usr/bin/kinit', 'mode': 0755, 'owner': 'hcat', 'bin_dir': '/usr/bin', 
> 'action': ['create_delayed']}
>     2014-10-24 22:47:04,815 - HdfsDirectory['/user/hcat'] 
> {'security_enabled': False, 'keytab': [EMPTY], 'conf_dir': 
> '/etc/hadoop/conf', 'hdfs_user': 'hdfs', 'kinit_path_local': 
> '/usr/bin/kinit', 'mode': 0755, 'owner': 'hcat', 'bin_dir': '/usr/bin', 
> 'action': ['create_delayed']}
>     2014-10-24 22:47:04,815 - HdfsDirectory['None'] {'security_enabled': 
> False, 'keytab': [EMPTY], 'conf_dir': '/etc/hadoop/conf', 'hdfs_user': 
> 'hdfs', 'kinit_path_local': '/usr/bin/kinit', 'action': ['create'], 
> 'bin_dir': '/usr/bin'}
>     2014-10-24 22:47:04,816 - Execute['hadoop --config /etc/hadoop/conf fs 
> -mkdir `rpm -q hadoop | grep -q "hadoop-1" || echo "-p"` /apps/webhcat 
> /user/hcat && hadoop --config /etc/hadoop/conf fs -chmod  755 /apps/webhcat 
> /user/hcat && hadoop --config /etc/hadoop/conf fs -chown  hcat /apps/webhcat 
> /user/hcat'] {'not_if': "su - hdfs -c 'export PATH=$PATH:/usr/bin ; hadoop 
> --config /etc/hadoop/conf fs -ls /apps/webhcat /user/hcat'", 'user': 'hdfs', 
> 'path': ['/usr/bin']}
>     2014-10-24 22:47:19,850 - Directory['/var/run/webhcat'] {'owner': 'hcat', 
> 'group': 'hadoop', 'recursive': True, 'mode': 0755}
>     2014-10-24 22:47:19,852 - Changing group for /var/run/webhcat from 0 to 
> hadoop
>     2014-10-24 22:47:19,852 - Directory['/var/log/webhcat'] {'owner': 'hcat', 
> 'group': 'hadoop', 'recursive': True, 'mode': 0755}
>     2014-10-24 22:47:19,852 - Creating directory Directory['/var/log/webhcat']
>     2014-10-24 22:47:19,860 - Changing owner for /var/log/webhcat from 0 to 
> hcat
>     2014-10-24 22:47:19,860 - Changing group for /var/log/webhcat from 0 to 
> hadoop
>     2014-10-24 22:47:19,861 - Directory['/etc/hive-webhcat/conf'] {'owner': 
> 'hcat', 'group': 'hadoop', 'recursive': True}
>     2014-10-24 22:47:19,862 - Changing owner for /etc/hive-webhcat/conf from 
> 0 to hcat
>     2014-10-24 22:47:19,862 - Changing group for /etc/hive-webhcat/conf from 
> 0 to hadoop
>     2014-10-24 22:47:19,862 - 
> CopyFromLocal['/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar'] 
> {'hadoop_conf_dir': '/etc/hadoop/conf', 'hdfs_user': 'hdfs', 'owner': 'hcat', 
> 'mode': 0755, 'dest_dir': '/apps/webhcat', 'hadoop_bin_dir': '/usr/bin', 
> 'kinnit_if_needed': ''}
>     2014-10-24 22:47:19,865 - ExecuteHadoop['fs -copyFromLocal 
> /usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar /apps/webhcat'] {'not_if': 
> "su - hcat -c ' export PATH=$PATH:/usr/bin ; hadoop fs -ls 
> /apps/webhcat/hadoop-streaming-*.jar' >/dev/null 2>&1", 'bin_dir': 
> '/usr/bin', 'user': 'hcat', 'conf_dir': '/etc/hadoop/conf'}
>     2014-10-24 22:47:23,667 - Execute['hadoop --config /etc/hadoop/conf fs 
> -copyFromLocal /usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar 
> /apps/webhcat'] {'logoutput': False, 'path': ['/usr/bin'], 'tries': 1, 
> 'user': 'hcat', 'try_sleep': 0}
>     2014-10-24 22:47:28,060 - ExecuteHadoop['fs -chown hcat 
> /apps/webhcat/hadoop-streaming-*.jar'] {'bin_dir': '/usr/bin', 'user': 
> 'hdfs', 'conf_dir': '/etc/hadoop/conf'}
>     2014-10-24 22:47:28,061 - Execute['hadoop --config /etc/hadoop/conf fs 
> -chown hcat /apps/webhcat/hadoop-streaming-*.jar'] {'logoutput': False, 
> 'path': ['/usr/bin'], 'tries': 1, 'user': 'hdfs', 'try_sleep': 0}
>     2014-10-24 22:47:31,729 - ExecuteHadoop['fs -chmod 755 
> /apps/webhcat/hadoop-streaming-*.jar'] {'bin_dir': '/usr/bin', 'user': 
> 'hdfs', 'conf_dir': '/etc/hadoop/conf'}
>     2014-10-24 22:47:31,731 - Execute['hadoop --config /etc/hadoop/conf fs 
> -chmod 755 /apps/webhcat/hadoop-streaming-*.jar'] {'logoutput': False, 
> 'path': ['/usr/bin'], 'tries': 1, 'user': 'hdfs', 'try_sleep': 0}
>     2014-10-24 22:47:35,263 - 
> CopyFromLocal['/usr/share/HDP-webhcat/pig.tar.gz'] {'hadoop_conf_dir': 
> '/etc/hadoop/conf', 'hdfs_user': 'hdfs', 'owner': 'hcat', 'mode': 0755, 
> 'dest_dir': '/apps/webhcat', 'hadoop_bin_dir': '/usr/bin', 
> 'kinnit_if_needed': ''}
>     2014-10-24 22:47:35,265 - ExecuteHadoop['fs -copyFromLocal 
> /usr/share/HDP-webhcat/pig.tar.gz /apps/webhcat'] {'not_if': "su - hcat -c ' 
> export PATH=$PATH:/usr/bin ; hadoop fs -ls /apps/webhcat/pig.tar.gz' 
> >/dev/null 2>&1", 'bin_dir': '/usr/bin', 'user': 'hcat', 'conf_dir': 
> '/etc/hadoop/conf'}
>     2014-10-24 22:47:39,124 - Execute['hadoop --config /etc/hadoop/conf fs 
> -copyFromLocal /usr/share/HDP-webhcat/pig.tar.gz /apps/webhcat'] 
> {'logoutput': False, 'path': ['/usr/bin'], 'tries': 1, 'user': 'hcat', 
> 'try_sleep': 0}
>     2014-10-24 22:47:44,682 - ExecuteHadoop['fs -chown hcat 
> /apps/webhcat/pig.tar.gz'] {'bin_dir': '/usr/bin', 'user': 'hdfs', 
> 'conf_dir': '/etc/hadoop/conf'}
>     2014-10-24 22:47:44,684 - Execute['hadoop --config /etc/hadoop/conf fs 
> -chown hcat /apps/webhcat/pig.tar.gz'] {'logoutput': False, 'path': 
> ['/usr/bin'], 'tries': 1, 'user': 'hdfs', 'try_sleep': 0}
>     2014-10-24 22:47:48,687 - ExecuteHadoop['fs -chmod 755 
> /apps/webhcat/pig.tar.gz'] {'bin_dir': '/usr/bin', 'user': 'hdfs', 
> 'conf_dir': '/etc/hadoop/conf'}
>     2014-10-24 22:47:48,688 - Execute['hadoop --config /etc/hadoop/conf fs 
> -chmod 755 /apps/webhcat/pig.tar.gz'] {'logoutput': False, 'path': 
> ['/usr/bin'], 'tries': 1, 'user': 'hdfs', 'try_sleep': 0}
>     2014-10-24 22:47:52,320 - 
> CopyFromLocal['/usr/share/HDP-webhcat/hive.tar.gz'] {'hadoop_conf_dir': 
> '/etc/hadoop/conf', 'hdfs_user': 'hdfs', 'owner': 'hcat', 'mode': 0755, 
> 'dest_dir': '/apps/webhcat', 'hadoop_bin_dir': '/usr/bin', 
> 'kinnit_if_needed': ''}
>     2014-10-24 22:47:52,321 - ExecuteHadoop['fs -copyFromLocal 
> /usr/share/HDP-webhcat/hive.tar.gz /apps/webhcat'] {'not_if': "su - hcat -c ' 
> export PATH=$PATH:/usr/bin ; hadoop fs -ls /apps/webhcat/hive.tar.gz' 
> >/dev/null 2>&1", 'bin_dir': '/usr/bin', 'user': 'hcat', 'conf_dir': 
> '/etc/hadoop/conf'}
>     2014-10-24 22:47:55,775 - Execute['hadoop --config /etc/hadoop/conf fs 
> -copyFromLocal /usr/share/HDP-webhcat/hive.tar.gz /apps/webhcat'] 
> {'logoutput': False, 'path': ['/usr/bin'], 'tries': 1, 'user': 'hcat', 
> 'try_sleep': 0}
>     2014-10-24 22:48:01,849 - ExecuteHadoop['fs -chown hcat 
> /apps/webhcat/hive.tar.gz'] {'bin_dir': '/usr/bin', 'user': 'hdfs', 
> 'conf_dir': '/etc/hadoop/conf'}
>     2014-10-24 22:48:01,851 - Execute['hadoop --config /etc/hadoop/conf fs 
> -chown hcat /apps/webhcat/hive.tar.gz'] {'logoutput': False, 'path': 
> ['/usr/bin'], 'tries': 1, 'user': 'hdfs', 'try_sleep': 0}
>     2014-10-24 22:48:05,293 - ExecuteHadoop['fs -chmod 755 
> /apps/webhcat/hive.tar.gz'] {'bin_dir': '/usr/bin', 'user': 'hdfs', 
> 'conf_dir': '/etc/hadoop/conf'}
>     2014-10-24 22:48:05,294 - Execute['hadoop --config /etc/hadoop/conf fs 
> -chmod 755 /apps/webhcat/hive.tar.gz'] {'logoutput': False, 'path': 
> ['/usr/bin'], 'tries': 1, 'user': 'hdfs', 'try_sleep': 0}
>     2014-10-24 22:48:09,228 - Did not find pig tar source file and 
> destination folder properties in cluster-env.xml
>     2014-10-24 22:48:09,228 - Did not find hive tar source file and 
> destination folder properties in cluster-env.xml
>     2014-10-24 22:48:09,228 - Error while executing command 'start':
>     Traceback (most recent call last):
>       File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 122, in execute
>         method(env)
>       File "/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py", line 38, in start
>         self.configure(env) # FOR SECURITY
>       File "/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat_server.py", line 33, in configure
>         webhcat()
>       File "/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py", line 156, in webhcat
>         mutable_configs = __inject_config_variables(mutable_configs)
>       File "/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HIVE/package/scripts/webhcat.py", line 46, in __inject_config_variables
>         if prop_value:
>       File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/config_dictionary.py", line 94, in __getattr__
>         raise Fail("Configuration parameter '"+self.name+"' was not found in configurations dictionary!")
>     Fail: Configuration parameter 'templeton.sqoop.archive' was not found in configurations dictionary!
>     
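The two 'Did not find ... tar source file and destination folder properties in cluster-env.xml' warnings just before the failure suggest the cluster this run was created on lacked some of the tarball properties the webhcat script expects, consistent with 'templeton.sqoop.archive' being absent from the configurations dictionary. A defensive lookup would turn the hard Fail into a no-op; a hypothetical sketch (the helper and the sample dict are illustrative, not the fix actually shipped in Ambari):

    # Hypothetical defensive lookup: a missing parameter yields a default
    # instead of raising on attribute access. Names here are illustrative.
    def get_config_param(section, key, default=None):
        """Return section[key] when present, else default."""
        if isinstance(section, dict) and key in section:
            return section[key]
        return default

    webhcat_site = {'templeton.pig.archive': 'hdfs:///apps/webhcat/pig.tar.gz'}
    prop_value = get_config_param(webhcat_site, 'templeton.sqoop.archive')
    if prop_value:  # safe: an absent key gives None rather than Fail
        print prop_value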


