Author: swagle
Date: Thu Apr 18 20:24:42 2013
New Revision: 1469549
URL: http://svn.apache.org/r1469549
Log:
AMBARI-1679. Create ambari agent scripts for Hadoop 2.0 installation,
configuration and management. (swagle)
Added:
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/init.pp
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/package.pp
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/params.pp
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/resourcemanager.pp
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/service.pp
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/templates/
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/templates/yarn-env.sh.erb
Modified:
incubator/ambari/trunk/CHANGES.txt
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hdfs/directory.pp
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/params.pp
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/service.pp
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/log4j.properties.erb
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp/manifests/package.pp
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp/manifests/params.pp
incubator/ambari/trunk/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
incubator/ambari/trunk/ambari-agent/src/main/python/ambari_agent/manifestGenerator.py
incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/server/Role.java
Modified: incubator/ambari/trunk/CHANGES.txt
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/CHANGES.txt?rev=1469549&r1=1469548&r2=1469549&view=diff
==============================================================================
--- incubator/ambari/trunk/CHANGES.txt (original)
+++ incubator/ambari/trunk/CHANGES.txt Thu Apr 18 20:24:42 2013
@@ -12,6 +12,9 @@ Trunk (unreleased changes):
NEW FEATURES
+ AMBARI-1679. Create ambari agent scripts for Hadoop 2.0 installation,
+ configuration and management. (swagle)
+
AMBARI-1680. Add Hadoop 2.0 stack definition to Ambari. (swagle)
AMBARI-1908. HDFS Mirroring: Add Bread Crumbs and Validation. (Arun Kandregula
Modified:
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hdfs/directory.pp
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hdfs/directory.pp?rev=1469549&r1=1469548&r2=1469549&view=diff
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hdfs/directory.pp
(original)
+++
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/hdfs/directory.pp
Thu Apr 18 20:24:42 2013
@@ -30,7 +30,13 @@ define hdp-hadoop::hdfs::directory(
{
if ($service_state == 'running') {
- $mkdir_cmd = "fs -mkdir ${name}"
+
+
+ if $stack_version in ("2.0.1") {
+ $mkdir_cmd = "fs -mkdir -p ${name}"
+ } else {
+ $mkdir_cmd = "fs -mkdir ${name}"
+ }
hdp-hadoop::exec-hadoop { $mkdir_cmd:
command => $mkdir_cmd,
unless => "hadoop fs -ls ${name} >/dev/null 2>&1"
Modified:
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/params.pp
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/params.pp?rev=1469549&r1=1469548&r2=1469549&view=diff
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/params.pp
(original)
+++
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/params.pp
Thu Apr 18 20:24:42 2013
@@ -75,6 +75,8 @@ class hdp-hadoop::params(
$namenode_opt_newsize =
hdp_default("hadoop/hadoop-env/namenode_opt_newsize","640m")
+ $hadoop_libexec_dir =
hdp_default("yarn/yarn-env/hadoop_libexec_dir","/usr/lib/hadoop/libexec")
+
### compression related
if (($hdp::params::lzo_enabled == true) and ($hdp::params::snappy_enabled ==
true)) {
$mapred_compress_map_output = true
Modified:
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/service.pp
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/service.pp?rev=1469549&r1=1469548&r2=1469549&view=diff
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/service.pp
(original)
+++
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/manifests/service.pp
Thu Apr 18 20:24:42 2013
@@ -32,6 +32,8 @@ define hdp-hadoop::service(
#NOTE does not work if namenode and datanode are on same host
$pid_dir = "${hdp-hadoop::params::hadoop_pid_dir_prefix}/${user}"
+ $hadoop_libexec_dir = $hdp-hadoop::params::hadoop_libexec_dir
+
if (($security_enabled == true) and ($name == 'datanode')) {
$run_as_root = true
} else {
@@ -46,7 +48,7 @@ define hdp-hadoop::service(
}
$log_dir = "${hdp-hadoop::params::hdfs_log_dir_prefix}/${user}"
- $hadoop_daemon = "${hdp::params::hadoop_bin}/hadoop-daemon.sh"
+ $hadoop_daemon = "export HADOOP_LIBEXEC_DIR=${hadoop_libexec_dir} &&
${hdp::params::hadoop_bin}/hadoop-daemon.sh"
$cmd = "${hadoop_daemon} --config ${hdp-hadoop::params::conf_dir}"
if ($ensure == 'running') {
Modified:
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/log4j.properties.erb
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/log4j.properties.erb?rev=1469549&r1=1469548&r2=1469549&view=diff
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/log4j.properties.erb
(original)
+++
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-hadoop/templates/log4j.properties.erb
Thu Apr 18 20:24:42 2013
@@ -82,6 +82,8 @@ log4j.appender.TLA.layout.ConversionPatt
#Security audit appender
#
hadoop.security.logger=INFO,console
+hadoop.security.log.maxfilesize=256MB
+hadoop.security.log.maxbackupindex=20
log4j.category.SecurityLogger=${hadoop.security.logger}
hadoop.security.log.file=SecurityAuth.audit
log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender
@@ -90,6 +92,13 @@ log4j.appender.DRFAS.layout=org.apache.l
log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
+log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
+log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
+log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
+log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+log4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}
+log4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}
+
#
# hdfs audit logging
#
@@ -118,16 +127,16 @@ log4j.appender.MRAUDIT.DatePattern=.yyyy
# Rolling File Appender
#
-#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
-#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
+log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
# Logfile size and 30-day backups
-#log4j.appender.RFA.MaxFileSize=1MB
-#log4j.appender.RFA.MaxBackupIndex=30
+log4j.appender.RFA.MaxFileSize=1MB
+log4j.appender.RFA.MaxBackupIndex=30
-#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
-#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2}
(%F:%M(%L)) - %m%n
+log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
+log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L))
- %m%n
# Custom Logging levels
Added:
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/init.pp
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/init.pp?rev=1469549&view=auto
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/init.pp
(added)
+++
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/init.pp
Thu Apr 18 20:24:42 2013
@@ -0,0 +1,80 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+class hdp-yarn::initialize()
+{
+ $yarn_user = $hdp-yarn::params::yarn_user
+
+ ##Process package
+ hdp-yarn::package{'yarn-common':}
+
+ # Create user
+ hdp::user { $yarn_user:}
+
+ #Generate common configs
+ hdp-yarn::generate_common_configs{'yarn-common-configs':}
+
+ anchor{ 'hdp-yarn::initialize::begin': } Hdp::Package['yarn-common'] ->
Hdp::User[$yarn_user] ->
Hdp-yarn::Generate_common_configs['yarn-common-configs'] -> anchor{
'hdp-yarn::initialize::end': }
+}
+
+define hdp-yarn::generate_common_configs() {
+
+ $yarn_config_dir = $hdp-yarn::params::conf_dir
+
+ # Generate configs
+ if has_key($configuration, 'mapred-site') {
+ configgenerator::configfile{'mapred-site':
+ modulespath => $yarn_config_dir,
+ filename => 'mapred-site.xml',
+ module => 'hdp-yarn',
+ configuration => $configuration['mapred-site'],
+ owner => $yarn_user,
+ mode => 755
+ }
+ } else { # Manually overriding ownership of file installed by hadoop package
+ file { "${yarn_config_dir}/mapred-site.xml":
+ owner => $yarn_user,
+ mode => 755
+ }
+ }
+
+ if has_key($configuration, 'yarn-site') {
+ configgenerator::configfile{'yarn-site':
+ modulespath => $yarn_config_dir,
+ filename => 'yarn-site.xml',
+ module => 'hdp-yarn',
+ configuration => $configuration['yarn-site'],
+ owner => $yarn_user,
+ mode => 755
+ }
+ } else { # Manually overriding ownership of file installed by hadoop package
+ file { "${yarn_config_dir}/yarn-site.xml":
+ owner => $yarn_user,
+ mode => 755
+ }
+ }
+
+ hdp::configfile {"${yarn_config_dir}/yarn-env.sh":
+ component => 'yarn',
+ owner => $yarn_user,
+ mode => 755
+ }
+}
Added:
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/package.pp
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/package.pp?rev=1469549&view=auto
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/package.pp
(added)
+++
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/package.pp
Thu Apr 18 20:24:42 2013
@@ -0,0 +1,28 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+define hdp-yarn::package()
+{
+ hdp::package{ $name:
+ ensure => present,
+ package_type => $package
+ }
+ anchor{ "hdp-yarn::package::${name}::begin": } -> Hdp::Package[$name] ->
anchor{ "hdp-yarn::package::${name}::end": }
+}
Added:
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/params.pp
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/params.pp?rev=1469549&view=auto
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/params.pp
(added)
+++
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/params.pp
Thu Apr 18 20:24:42 2013
@@ -0,0 +1,38 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+class hdp-yarn::params(
+) inherits hdp::params
+{
+
+ $conf_dir = $hdp::params::yarn_conf_dir
+
+ ## yarn-env
+ $hadoop_libexec_dir =
hdp_default("yarn/yarn-env/hadoop_libexec_dir","/usr/lib/hadoop/libexec")
+
+ $hadoop_common_home =
hdp_default("yarn/yarn-env/hadoop_common_home","/usr/lib/hadoop")
+ $hadoop_hdfs_home =
hdp_default("yarn/yarn-env/hadoop_hdfs_home","/usr/lib/hadoop-hdfs")
+ $hadoop_mapred_home =
hdp_default("yarn/yarn-env/hadoop_mapred_home","/usr/lib/hadoop-yarn")
+ $hadoop_yarn_home =
hdp_default("yarn/yarn-env/hadoop_yarn_home","/usr/lib/hadoop-yarn")
+
+ $yarn_log_dir_prefix =
hdp_default("hadoop/yarn-env/yarn_log_dir_prefix","/var/log/hadoop-yarn")
+ $yarn_pid_dir_prefix =
hdp_default("hadoop/yarn-env/yarn_pid_dir_prefix","/var/run/hadoop-yarn")
+
+}
Added:
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/resourcemanager.pp
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/resourcemanager.pp?rev=1469549&view=auto
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/resourcemanager.pp
(added)
+++
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/resourcemanager.pp
Thu Apr 18 20:24:42 2013
@@ -0,0 +1,53 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+class hdp-yarn::resourcemanager(
+ $service_state = $hdp::params::cluster_service_state,
+ $opts = {}
+) inherits hdp-yarn::params
+{
+ $yarn_user = $hdp-yarn::params::yarn_user
+
+ if ($service_state == 'no_op') {
+ } elsif ($service_state in 'installed_and_configured') {
+
+ include hdp-yarn::initialize
+
+ ##Process package
+ hdp-yarn::package{'yarn-resourcemanager':}
+
+ } elsif ($service_state in ['running','stopped']) {
+
+ if ( ($service_state == 'installed_and_configured') and
+ ($security_enabled == true) and ($kerberos_install_type ==
"AMBARI_SET_KERBEROS") ) {
+ hdp_fail("Security not yet implemented for resource manager")
+ }
+
+ include hdp-yarn::initialize
+
+ hdp-yarn::service{ 'resourcemanager':
+ ensure => $service_state,
+ user => $yarn_user
+ }
+
+ } else {
+ hdp_fail("TODO not implemented yet: service_state = ${service_state}")
+ }
+}
\ No newline at end of file
Added:
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/service.pp
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/service.pp?rev=1469549&view=auto
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/service.pp
(added)
+++
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/manifests/service.pp
Thu Apr 18 20:24:42 2013
@@ -0,0 +1,105 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+define hdp-yarn::service(
+ $ensure = 'running',
+ $user,
+ $initial_wait = undef,
+ $create_pid_dir = true,
+ $create_log_dir = true
+)
+{
+
+ $security_enabled = $hdp::params::security_enabled
+ $log_dir = "${hdp-yarn::params::yarn_log_dir_prefix}"
+ $pid_dir = "${hdp-yarn::params::yarn_pid_dir_prefix}/${user}"
+ $yarn_daemon = "${hdp::params::yarn_bin}/yarn-daemon.sh"
+ $hadoop_libexec_dir = $hdp-yarn::params::hadoop_libexec_dir
+
+ $cmd = "export HADOOP_LIBEXEC_DIR=${hadoop_libexec_dir} && ${yarn_daemon}
--config ${hdp-yarn::params::conf_dir}"
+
+
+ $pid_file = "${pid_dir}/hadoop-${user}-${name}.pid"
+
+
+
+ if ($ensure == 'running') {
+ if ($run_as_root == true) {
+ $daemon_cmd = "${cmd} start ${name}"
+ } else {
+ $daemon_cmd = "su - ${user} -c '${cmd} start ${name}'"
+ }
+ $service_is_up = "ls ${pid_file} >/dev/null 2>&1 && ps `cat ${pid_file}`
>/dev/null 2>&1"
+ } elsif ($ensure == 'stopped') {
+ if ($run_as_root == true) {
+ $daemon_cmd = "${cmd} stop ${name}"
+ } else {
+ $daemon_cmd = "su - ${user} -c '${cmd} stop ${name}'"
+ }
+ $service_is_up = undef
+ } else {
+ $daemon_cmd = undef
+ }
+
+
+ if ($create_pid_dir == true) {
+ hdp::directory_recursive_create { $pid_dir:
+ owner => $user,
+ context_tag => 'yarn_service',
+ service_state => $service_state,
+ force => true
+ }
+ }
+
+ if ($create_log_dir == true) {
+ hdp::directory_recursive_create { $log_dir:
+ owner => $user,
+ context_tag => 'yarn_service',
+ service_state => $service_state,
+ force => true
+ }
+ }
+
+ if ($daemon_cmd != undef) {
+ hdp::exec { $daemon_cmd:
+ command => $daemon_cmd,
+ unless => $service_is_up,
+ initial_wait => $initial_wait
+ }
+ }
+
+ anchor{"hdp-yarn::service::${name}::begin":}
+ anchor{"hdp-yarn::service::${name}::end":}
+ if ($daemon_cmd != undef) {
+ Anchor["hdp-yarn::service::${name}::begin"] -> Hdp::Exec[$daemon_cmd] ->
Anchor["hdp-yarn::service::${name}::end"]
+
+ }
+ if ($ensure == 'running') {
+ #TODO: look at Puppet resource retry and retry_sleep
+ #TODO: can make sleep contingent on $name
+ $sleep = 5
+ $post_check = "sleep ${sleep}; ${service_is_up}"
+ hdp::exec { $post_check:
+ command => $post_check,
+ unless => $service_is_up
+ }
+ Hdp::Exec[$daemon_cmd] -> Hdp::Exec[$post_check] ->
Anchor["hdp-yarn::service::${name}::end"]
+ }
+}
Added:
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/templates/yarn-env.sh.erb
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/templates/yarn-env.sh.erb?rev=1469549&view=auto
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/templates/yarn-env.sh.erb
(added)
+++
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp-yarn/templates/yarn-env.sh.erb
Thu Apr 18 20:24:42 2013
@@ -0,0 +1,99 @@
+#/*
+# * Licensed to the Apache Software Foundation (ASF) under one
+# * or more contributor license agreements. See the NOTICE file
+# * distributed with this work for additional information
+# * regarding copyright ownership. The ASF licenses this file
+# * to you under the Apache License, Version 2.0 (the
+# * "License"); you may not use this file except in compliance
+# * with the License. You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# */
+
+# Set Hadoop-specific environment variables here.
+
+# The only required environment variable is JAVA_HOME. All others are
+# optional. When running a distributed configuration it is best to
+# set JAVA_HOME in this file, so that it is correctly defined on
+# remote nodes.
+
+export
HADOOP_COMMON_HOME=<%=scope.function_hdp_template_var("hadoop_common_home")%>
+export
HADOOP_HDFS_HOME=<%=scope.function_hdp_template_var("hadoop_hdfs_home")%>
+export
HADOOP_MAPRED_HOME=<%=scope.function_hdp_template_var("hadoop_mapred_home")%>
+export
HADOOP_YARN_HOME=<%=scope.function_hdp_template_var("hadoop_yarn_home")%>
+
+export
YARN_LOG_DIR=<%=scope.function_hdp_template_var("yarn_log_dir_prefix")%>/$USER
+export
YARN_PID_DIR=<%=scope.function_hdp_template_var("yarn_pid_dir_prefix")%>/$USER
+
+export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
+
+# The java implementation to use. Required.
+export JAVA_HOME=<%=scope.function_hdp_java_home()%>
+export HADOOP_HOME_WARN_SUPPRESS=1
+
+# Hadoop Configuration Directory
+#TODO: if env var set that can cause problems
+export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/etc/hadoop/conf}
+
+# The maximum amount of heap to use, in MB. Default is 1000.
+export HADOOP_HEAPSIZE="1024"
+
+export HADOOP_NAMENODE_INIT_HEAPSIZE="-Xms1024m"
+
+# Extra Java runtime options. Empty by default.
+export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}"
+
+# Command specific options appended to HADOOP_OPTS when specified
+export HADOOP_NAMENODE_OPTS="-server -XX:ParallelGCThreads=8
-XX:+UseConcMarkSweepGC -XX:ErrorFile=/var/log/hadoop/$USER/hs_err_pid%p.log
-XX:NewSize=200m -XX:MaxNewSize=640m -Xloggc:/var/log/hadoop/$USER/gc.log-`date
+'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps
-XX:+PrintGCDateStamps -Xms1024m -Xmx1024m -Dhadoop.security.logger=INFO,DRFAS
-Dhdfs.audit.logger=INFO,DRFAAUDIT ${HADOOP_NAMENODE_OPTS}"
+HADOOP_JOBTRACKER_OPTS="-server -XX:ParallelGCThreads=8
-XX:+UseConcMarkSweepGC -XX:ErrorFile=/var/log/hadoop/$USER/hs_err_pid%p.log
-XX:NewSize=200m -XX:MaxNewSize=200m -Xloggc:/var/log/hadoop/$USER/gc.log-`date
+'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps
-XX:+PrintGCDateStamps -Xmx1024m -Dhadoop.security.logger=INFO,DRFAS
-Dmapred.audit.logger=INFO,MRAUDIT
-Dhadoop.mapreduce.jobsummary.logger=INFO,JSA ${HADOOP_JOBTRACKER_OPTS}"
+
+HADOOP_TASKTRACKER_OPTS="-server -Xmx1024m
-Dhadoop.security.logger=ERROR,console -Dmapred.audit.logger=ERROR,console
${HADOOP_TASKTRACKER_OPTS}"
+HADOOP_DATANODE_OPTS="-Xmx1024m -Dhadoop.security.logger=ERROR,DRFAS
${HADOOP_DATANODE_OPTS}"
+HADOOP_BALANCER_OPTS="-server -Xmx1024m ${HADOOP_BALANCER_OPTS}"
+
+export HADOOP_SECONDARYNAMENODE_OPTS="-server -XX:ParallelGCThreads=8
-XX:+UseConcMarkSweepGC -XX:ErrorFile=/var/log/hadoop/$USER/hs_err_pid%p.log
-XX:NewSize=200m -XX:MaxNewSize=640m -Xloggc:/var/log/hadoop/$USER/gc.log-`date
+'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps
-XX:+PrintGCDateStamps ${HADOOP_NAMENODE_INIT_HEAPSIZE} -Xmx1024m
-Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT
${HADOOP_SECONDARYNAMENODE_OPTS}"
+
+# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
+export HADOOP_CLIENT_OPTS="-Xmx128m ${HADOOP_CLIENT_OPTS}"
+#HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData ${HADOOP_JAVA_PLATFORM_OPTS}"
+
+# On secure datanodes, user to run the datanode as after dropping privileges
+export HADOOP_SECURE_DN_USER=hdfs
+
+# Extra ssh options. Empty by default.
+export HADOOP_SSH_OPTS="-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR"
+
+# Where log files are stored. $HADOOP_HOME/logs by default.
+export HADOOP_LOG_DIR=/var/log/hadoop/$USER
+
+
+# Where log files are stored in the secure data environment.
+export HADOOP_SECURE_DN_LOG_DIR=/var/log/hadoop/$HADOOP_SECURE_DN_USER
+
+# File naming remote slave hosts. $HADOOP_HOME/conf/slaves by default.
+# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
+
+# host:path where hadoop code should be rsync'd from. Unset by default.
+# export HADOOP_MASTER=master:/home/$USER/src/hadoop
+
+# Seconds to sleep between slave commands. Unset by default. This
+# can be useful in large clusters, where, e.g., slave rsyncs can
+# otherwise arrive faster than the master can service them.
+# export HADOOP_SLAVE_SLEEP=0.1
+
+# The directory where pid files are stored. /tmp by default.
+export HADOOP_PID_DIR=/var/run/hadoop/$USER
+export HADOOP_SECURE_DN_PID_DIR=/var/run/hadoop/$HADOOP_SECURE_DN_USER
+
+# A string representing this instance of hadoop. $USER by default.
+export HADOOP_IDENT_STRING=$USER
+
+# The scheduling priority for daemon processes. See 'man nice'.
+
+# export HADOOP_NICENESS=10
Modified:
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp/manifests/package.pp
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp/manifests/package.pp?rev=1469549&r1=1469548&r2=1469549&view=diff
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp/manifests/package.pp
(original)
+++
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp/manifests/package.pp
Thu Apr 18 20:24:42 2013
@@ -49,29 +49,52 @@ define hdp::package::process_pkg(
$lzo_needed
)
{
+
+ $stack_version = $hdp::params::stack_version
debug("##Processing package:
$ensure,$package_type,$size,$java_needed,$lzo_needed")
include hdp::params
- if hdp_is_empty($hdp::params::alt_package_names[$package_type]) {
+ if hdp_is_empty($hdp::params::package_names[$package_type]) {
hdp_fail("No packages for $package_type")
}
+
+ ## Process packages depending on stack
+ if hdp_is_empty($hdp::params::package_names[$package_type][$stack_version]) {
- if hdp_is_empty($hdp::params::alt_package_names[$package_type][$size]) {
+ if hdp_is_empty($hdp::params::package_names[$package_type][ALL]) {
+ hdp_fail("No packages for $package_type")
+ }
+ else {
+ $packages_list_by_stack = $hdp::params::package_names[$package_type][ALL]
+ }
+ }
+ else {
+ $packages_list_by_stack =
$hdp::params::package_names[$package_type][$stack_version]
+ }
+
+ debug("##Pkg for stack: $packages_list_by_stack")
+
+ ## Process packages depending on arch
+ if hdp_is_empty($packages_list_by_stack[$size]) {
- if hdp_is_empty($hdp::params::alt_package_names[$package_type][ALL]) {
+ if hdp_is_empty($packages_list_by_stack[ALL]) {
hdp_fail("No packages for $package_type")
}
else {
- $packages_list_by_size =
$hdp::params::alt_package_names[$package_type][ALL]
+ $packages_list_by_size = $packages_list_by_stack[ALL]
}
}
else {
- $packages_list_by_size =
$hdp::params::alt_package_names[$package_type][$size]
+ $packages_list_by_size = $packages_list_by_stack[$size]
}
+
+ debug("##Pkg for arch: $packages_list_by_size")
+
+ ## Process packages depending on os
if hdp_is_empty($packages_list_by_size[$hdp::params::hdp_os_type]) {
if hdp_is_empty($packages_list_by_size[ALL]) {
Modified:
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp/manifests/params.pp
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp/manifests/params.pp?rev=1469549&r1=1469548&r2=1469549&view=diff
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp/manifests/params.pp
(original)
+++
incubator/ambari/trunk/ambari-agent/src/main/puppet/modules/hdp/manifests/params.pp
Thu Apr 18 20:24:42 2013
@@ -66,6 +66,10 @@ class hdp::params()
$hdp_os = $::operatingsystem
$hdp_os_version = $::operatingsystemrelease
+
+
+ ## Stack version
+ $stack_version = hdp_default("stack_version", "1.3.0")
case $::operatingsystem {
@@ -131,6 +135,7 @@ class hdp::params()
$hdfs_user = hdp_default("hdfs_user","hdfs")
$mapred_user = hdp_default("mapred_user","mapred")
+ $yarn_user = hdp_default("yarn_user","yarn")
$zk_user = hdp_default("zk_user","zookeeper")
$hbase_user = hdp_default("hbase_user","hbase")
@@ -229,143 +234,8 @@ class hdp::params()
$apache_artifacts_download_url =
hdp_default("apache_artifacts_download_url","")
$gpl_artifacts_download_url = hdp_default("gpl_artifacts_download_url","")
- ### related to package resources
- #TODO: delete variable $package_names
- $package_names = {
- # hadoop => {
- # 32 => 'hadoop.i386',
- # 64 => 'hadoop.x86_64'
- # },
- # zookeeper => {
- # 64 => 'zookeeper.x86_64'
- # },
- # hbase => {
- # 64 => 'hbase.x86_64'
- # },
- # hcat-server => {
- # 64 => 'hcatalog-server.x86_64'
- # },
- # hcat-base => {
- # 64 => 'hcatalog.x86_64'
- # },
- # pig => {
- # 32 => 'pig.i386'
- # },
- ganglia-monitor => {
- 64 => 'ganglia-gmond-3.2.0'
- },
- ganglia-server => {
- 64 => ['ganglia-gmetad-3.2.0']
- },
- ganglia-gweb => {
- 64 => 'gweb'
- },
- ganglia-hdp-gweb-addons => {
- 64 => 'hdp_mon_ganglia_addons'
- },
- glibc-rhel6 => {
- 32 => ['glibc','glibc.i686'],
- 64 => ['glibc','glibc.i686']
- },
- nagios-addons => {
- 64 => 'hdp_mon_nagios_addons'
- },
- nagios-server => {
- 64 => 'nagios-3.2.3'
- },
- nagios-plugins => {
- 64 => 'nagios-plugins'
- },
- nagios-fping => {
- 64 =>'fping'
- },
- nagios-php-pecl-json => {
- 64 => 'php-pecl-json.x86_64'
- },
- snmp => {
- 64 => ['net-snmp'],
- },
- dashboard => {
- 64 => 'hdp_mon_dashboard'
- },
- # sqoop => {
- # 32 => 'sqoop-1.4.1-1.noarch'
- #},
- webhcat => {
- 32 => 'hcatalog',
- 64 => 'hcatalog'
- },
- oozie-client => {
- 64 => 'oozie-client'
- },
- oozie-server => {
- 64 => 'oozie'
- },
- lzo-rhel5 => {
- 32 => ['lzo','lzo.i386','lzo-devel','lzo-devel.i386'],
- 64 => ['lzo','lzo.i386','lzo-devel','lzo-devel.i386']
- },
- lzo-rhel6 => {
- 32 => ['lzo','lzo.i686','lzo-devel','lzo-devel.i686'],
- 64 => ['lzo','lzo.i686','lzo-devel','lzo-devel.i686']
- },
- #TODO: make these two consistent on whether case of 64/32 bits
- snappy => {
- 32 => ['snappy','snappy-devel'],
- 64 => ['snappy','snappy-devel']
- },
- mysql => {
- 32 => ['mysql','mysql-server']
- },
- mysql-connector => {
- 64 => ['mysql-connector-java']
- },
- extjs => {
- 64 => ['extjs-2.2-1']
- },
- templeton-tar-hive => {
- 64 => ['templeton-tar-hive-0.0.1.14-1']
- },
- templeton-tar-pig => {
- 64 => ['templeton-tar-pig-0.0.1.14-1']
- },
- rrdtool-python => {
- 64 => ['python-rrdtool.x86_64']
- },
- # The 32bit version of package rrdtool-devel is removed on centos 5/6 to
prevent conflict ( BUG-2881)
- rrdtool-devel => {
- 64 => {
- 'ALL' => 'rrdtool-devel.i686',
- 'centos6' => 'rrdtool-devel.i686',
- 'centos5' => 'rrdtool-devel.i386',
- 'redhat6' => 'rrdtool-devel.i686',
- 'redhat5' => 'rrdtool-devel.i386',
- 'oraclelinux6' => 'rrdtool-devel.i686',
- 'oraclelinux5' => 'rrdtool-devel.i386'
- }
- },
- # The 32bit version of package rrdtool is removed on centos 5/6 to prevent
conflict ( BUG-2408)
- rrdtool => {
- 64 => {
- 'ALL' => 'rrdtool.i686',
- 'centos6' => 'rrdtool.i686',
- 'centos5' => 'rrdtool.i386',
- 'redhat6' => 'rrdtool.i686',
- 'redhat5' => 'rrdtool.i386',
- 'oraclelinux6' => 'rrdtool.i686',
- 'oraclelinux5' => 'rrdtool.i386'
- }
- },
- ambari-log4j => {
- 64 => ['ambari-log4j']
- },
- hue-server => {
- 64 => ['hue.noarch']
- }
- }
$packages = 'bigtop'
if ($packages == 'hdp') {
- $package_names[hadoop] = { 32 => ['hadoop.i386'], 64 => ['hadoop.x86_64']}
$mapred_smoke_test_script = "/usr/sbin/hadoop-validate-setup.sh"
$hadoop_bin = "/usr/sbin"
$hadoop_conf_dir = "/etc/hadoop"
@@ -377,12 +247,6 @@ class hdp::params()
$hadoop_jar_location = "/usr/share/hadoop"
$hbase_daemon_script = "/usr/bin/hbase-daemon.sh"
$use_32_bits_on_slaves = false
- $package_names[zookeeper] = {64 => 'zookeeper.x86_64'}
- $package_names[hbase] = {64 => 'hbase.x86_64'}
- $package_names[sqoop] = {32 => 'sqoop-1.4.1-1.noarch'}
- $package_names[pig] = { 32 => 'pig.i386'}
- $package_names[hcat-server] = { 64 => 'hcatalog-server.x86_64'}
- $package_names[hcat-base] = { 64 => 'hcatalog.x86_64'}
$zk_bin = '/usr/sbin'
$zk_smoke_test_script = '/usr/bin/zkCli.sh'
$update_zk_shell_files = false
@@ -393,13 +257,17 @@ class hdp::params()
$hive_conf_dir = "/etc/hive/conf"
} elsif ($packages == 'bigtop') {
-
- $package_names[hadoop] = {32 =>
['hadoop','hadoop-libhdfs.i386','hadoop-native.i386','hadoop-pipes.i386','hadoop-sbin.i386','hadoop-lzo',
'hadoop-lzo-native.i386'], 64 =>
['hadoop','hadoop-libhdfs','hadoop-native','hadoop-pipes','hadoop-sbin','hadoop-lzo',
'hadoop-lzo-native']}
- #$package_names[hadoop] = {32 => ['hadoop.i386','hadoop-native.i386'], 64
=> ['hadoop.x86_64','hadoop-native.x86_64']}
$mapred_smoke_test_script = "/usr/lib/hadoop/sbin/hadoop-validate-setup.sh"
- $hadoop_bin = "/usr/lib/hadoop/bin"
+
+ if $stack_version in ("2.0.1") {
+ $hadoop_bin = "/usr/lib/hadoop/sbin"
+ } else {
+ $hadoop_bin = "/usr/lib/hadoop/bin"
+ }
+ $yarn_bin = "/usr/lib/hadoop-yarn/sbin"
$hadoop_conf_dir = "/etc/hadoop/conf"
+ $yarn_conf_dir = "/etc/hadoop/conf"
$zk_conf_dir = "/etc/zookeeper/conf"
$hbase_conf_dir = "/etc/hbase/conf"
$sqoop_conf_dir = "/usr/lib/sqoop/conf"
@@ -410,12 +278,6 @@ class hdp::params()
$hadoop_jar_location = "/usr/lib/hadoop/"
$hbase_daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
$use_32_bits_on_slaves = false
- $package_names[zookeeper] = {64 => ['zookeeper']}
- $package_names[hbase] = {64 => ['hbase']}
- $package_names[sqoop] = {32 => ['sqoop'], 64 => ['sqoop']}
- $package_names[pig] = {32 => ['pig.noarch'], 64 => ['pig.noarch']}
- $package_names[hcat] = {32 => ['hcatalog'], 64 => ['hcatalog']}
- $package_names[hive] = {64 => ['hive']}
$zk_bin = '/usr/lib/zookeeper/bin'
$zk_smoke_test_script = "/usr/lib/zookeeper/bin/zkCli.sh"
$update_zk_shell_files = false
@@ -450,182 +312,268 @@ class hdp::params()
}
+ # StackId => Arch => Os
+ $package_names =
+ {
+ snmp => {
+ 'ALL' => {
+ 64 => {
+ suse =>['net-snmp'],
+ 'ALL' => ['net-snmp', 'net-snmp-utils']
+ }
+ }
+ },
- $alt_package_names =
-{
- snmp =>
- { 64 => {suse =>['net-snmp'],
- 'ALL' => ['net-snmp', 'net-snmp-utils']}
- },
+ oozie-server => {
+ 'ALL' => {
+ 64 => {
+ 'ALL' => 'oozie.noarch'
+ }
+ }
+ },
- oozie-server =>
- {
- 64 => {'ALL' => 'oozie.noarch'}
- },
+ snappy => {
+ 'ALL' => {
+ 64 => {
+ 'ALL' => ['snappy','snappy-devel']
+ }
+ }
+ },
+ hadoop => {
+ 'ALL' => {
+ 32 => {
+ 'ALL' =>
['hadoop','hadoop-libhdfs.i386','hadoop-native.i386','hadoop-pipes.i386','hadoop-sbin.i386','hadoop-lzo',
'hadoop-lzo-native.i386']
+ },
+ 64 => {
+ 'ALL' =>
['hadoop','hadoop-libhdfs','hadoop-native','hadoop-pipes','hadoop-sbin','hadoop-lzo',
'hadoop-lzo-native']
+ }
+ },
+ '2.0.1' => {
+ 64 => {
+ 'ALL' => ['hadoop','hadoop-libhdfs','hadoop-lzo',
'hadoop-lzo-native']
+ }
+ }
+ },
- snappy => {
- 64 => {'ALL' => ['snappy','snappy-devel']}
+ yarn-common => {
+ 'ALL' => {
+ 64 => {
+ 'ALL' => ['hadoop-yarn']
+ }
+ }
},
+ yarn-nodemanager => {
+ 'ALL' => {
+ 64 => {
+ 'ALL' => ['hadoop-yarn-nodemanager', 'hadoop-yarn-proxyserver',
'hadoop-yarn-resourcemanager']
+ }
+ }
+ },
- hadoop => {
- 32 => {'ALL' =>
['hadoop','hadoop-libhdfs.i386','hadoop-native.i386','hadoop-pipes.i386','hadoop-sbin.i386','hadoop-lzo',
'hadoop-lzo-native.i386']},
- 64 => {'ALL'
=>['hadoop','hadoop-libhdfs','hadoop-native','hadoop-pipes','hadoop-sbin','hadoop-lzo',
'hadoop-lzo-native']}
+ yarn-proxyserver => {
+ 'ALL' => {
+ 64 => {
+ 'ALL' => ['hadoop-yarn-proxyserver', 'hadoop-yarn-resourcemanager']
+ }
+ }
+ },
+
+ yarn-resourcemanager => {
+ 'ALL' => {
+ 64 => {
+ 'ALL' => ['hadoop-yarn-resourcemanager']
+ }
+ }
},
lzo => {
- 'ALL' => {'ALL' => ['lzo', 'lzo-devel'],
- suse => ['lzo-devel']},
+ 'ALL' => {
+ 'ALL' => {
+ 'ALL' => ['lzo', 'lzo-devel'],
+ suse => ['lzo-devel']
+ }
+ }
},
glibc=> {
- 'ALL' => {'ALL' => ['glibc','glibc.i686'],
- suse => ['glibc']},
+ 'ALL' => {
+ 'ALL' => {
+ 'ALL' => ['glibc','glibc.i686'],
+ suse => ['glibc']
+ }
+ }
},
zookeeper=> {
- 64 => {'ALL' => 'zookeeper'},
+ 'ALL' => {64 => {'ALL' => 'zookeeper'}}
},
+
hbase=> {
- 64 => {'ALL' => 'hbase'},
+ 'ALL' => {64 => {'ALL' => 'hbase'}}
},
- pig=> {
- 'ALL' => {'ALL'=>['pig.noarch']}
+ pig=> {
+ 'ALL' => {'ALL' => {'ALL'=>['pig.noarch']}}
},
sqoop=> {
- 'ALL' =>{'ALL' => ['sqoop']}
+ 'ALL' => {'ALL' =>{'ALL' => ['sqoop']}}
},
mysql-connector-java=> {
- 'ALL' =>{'ALL' => ['mysql-connector-java']}
+ 'ALL' => {'ALL' =>{'ALL' => ['mysql-connector-java']}}
},
oozie-client=> {
- '64' =>{'ALL' => ['oozie-client.noarch']}
+ 'ALL' => {'64' =>{'ALL' => ['oozie-client.noarch']}}
},
extjs=> {
- 64 =>{'ALL' => ['extjs-2.2-1']}
+ 'ALL' => {64 =>{'ALL' => ['extjs-2.2-1']}}
},
hive=> {
- 64 =>{'ALL' => ['hive']}
+ 'ALL' => {64 =>{'ALL' => ['hive']}}
},
hcat=> {
- 'ALL' =>{'ALL' => ['hcatalog']}
+ 'ALL' => {'ALL' =>{'ALL' => ['hcatalog']}}
},
mysql => {
- 64 => {'ALL' => ['mysql','mysql-server'],
- suse => ['mysql-client','mysql']}
+ 'ALL' => {
+ 64 => {
+ 'ALL' => ['mysql','mysql-server'],
+ suse => ['mysql-client','mysql']
+ }
+ }
},
+
webhcat => {
- 'ALL' => {'ALL' => 'hcatalog'}
+ 'ALL' => {'ALL' => {'ALL' => 'hcatalog'}}
},
+
webhcat-tar-hive => {
- 64 => {'ALL' => 'webhcat-tar-hive'}
+ 'ALL' => {64 => {'ALL' => 'webhcat-tar-hive'}}
},
+
webhcat-tar-pig => {
- 64 => {'ALL' =>'webhcat-tar-pig'}
+ 'ALL' => {64 => {'ALL' =>'webhcat-tar-pig'}}
},
+
dashboard => {
- 64 => {'ALL' => 'hdp_mon_dashboard'}
+ 'ALL' => {64 => {'ALL' => 'hdp_mon_dashboard'}}
},
perl =>
{
- 64 => {'ALL' => 'perl'}
+ 'ALL' => {64 => {'ALL' => 'perl'}}
},
perl-Net-SNMP =>
{
- 64 => {'ALL' => 'perl-Net-SNMP'}
+ 'ALL' => {64 => {'ALL' => 'perl-Net-SNMP'}}
},
nagios-server => {
- 64 => {'ALL' => 'nagios-3.2.3'}
+ 'ALL' => {64 => {'ALL' => 'nagios-3.2.3'}}
},
nagios-fping => {
- 64 =>{'ALL' => 'fping'}
+ 'ALL' => {64 =>{'ALL' => 'fping'}}
},
nagios-plugins => {
- 64 => {'ALL' => 'nagios-plugins-1.4.9'}
+ 'ALL' => {64 => {'ALL' => 'nagios-plugins-1.4.9'}}
},
nagios-addons => {
- 64 => {'ALL' => 'hdp_mon_nagios_addons'}
+ 'ALL' => {64 => {'ALL' => 'hdp_mon_nagios_addons'}}
},
+
nagios-php-pecl-json => {
- 64 => {'ALL' => $NOTHING,
- suse => 'php5-json',
- centos6 => $NOTHING,
- redhat6 => $NOTHING,
- oraclelinux6 => $NOTHING,
- centos5 => 'php-pecl-json.x86_64',
- redhat5 => 'php-pecl-json.x86_64',
- oraclelinux5 => 'php-pecl-json.x86_64'}
+ 'ALL' => {
+ 64 => {
+ 'ALL' => $NOTHING,
+ suse => 'php5-json',
+ centos6 => $NOTHING,
+ redhat6 => $NOTHING,
+ oraclelinux6 => $NOTHING,
+ centos5 => 'php-pecl-json.x86_64',
+ redhat5 => 'php-pecl-json.x86_64',
+ oraclelinux5 => 'php-pecl-json.x86_64'
+ }
+ }
},
ganglia-server => {
- 64 => {'ALL' => 'ganglia-gmetad-3.2.0'}
+ 'ALL' => {64 => {'ALL' => 'ganglia-gmetad-3.2.0'}}
},
ganglia-gweb => {
- 64 => {'ALL' => 'gweb'}
+ 'ALL' => {64 => {'ALL' => 'gweb'}}
},
ganglia-hdp-gweb-addons => {
- 64 => {'ALL' => 'hdp_mon_ganglia_addons'}
+ 'ALL' => {64 => {'ALL' => 'hdp_mon_ganglia_addons'}}
},
ganglia-monitor => {
- 64 => {'ALL' =>'ganglia-gmond-3.2.0'}
+ 'ALL' => {64 => {'ALL' =>'ganglia-gmond-3.2.0'}}
},
-
+
rrdtool-python => {
- 64 => {'ALL' =>'python-rrdtool.x86_64'}
+ 'ALL' => {64 => {'ALL' =>'python-rrdtool.x86_64'}}
},
# The 32bit version of package rrdtool-devel is removed on centos 5/6 to
prevent conflict ( BUG-2881)
rrdtool-devel => {
- 64 => {
- 'ALL' => 'rrdtool-devel.i686',
- 'centos6' => 'rrdtool-devel.i686',
- 'centos5' => 'rrdtool-devel.i386',
- 'redhat6' => 'rrdtool-devel.i686',
- 'redhat5' => 'rrdtool-devel.i386',
- 'oraclelinux6' => 'rrdtool-devel.i686',
- 'oraclelinux5' => 'rrdtool-devel.i386'
+ 'ALL' => {
+ 64 => {
+ 'ALL' => 'rrdtool-devel.i686',
+ 'centos6' => 'rrdtool-devel.i686',
+ 'centos5' => 'rrdtool-devel.i386',
+ 'redhat6' => 'rrdtool-devel.i686',
+ 'redhat5' => 'rrdtool-devel.i386',
+ 'oraclelinux6' => 'rrdtool-devel.i686',
+ 'oraclelinux5' => 'rrdtool-devel.i386'
}
+ }
},
# The 32bit version of package rrdtool is removed on centos 5/6 to prevent
conflict ( BUG-2408)
rrdtool => {
- 64 => {
- 'ALL' => 'rrdtool.i686',
- 'centos6' => 'rrdtool.i686',
- 'centos5' => 'rrdtool.i386',
- 'redhat6' => 'rrdtool.i686',
- 'redhat5' => 'rrdtool.i386',
- 'oraclelinux6' => 'rrdtool.i686',
- 'oraclelinux5' => 'rrdtool.i386'
+ 'ALL' => {
+ 64 => {
+ 'ALL' => 'rrdtool.i686',
+ 'centos6' => 'rrdtool.i686',
+ 'centos5' => 'rrdtool.i386',
+ 'redhat6' => 'rrdtool.i686',
+ 'redhat5' => 'rrdtool.i386',
+ 'oraclelinux6' => 'rrdtool.i686',
+ 'oraclelinux5' => 'rrdtool.i386'
}
+ }
},
hue-server => {
- 64 => {'ALL' => 'hue.noarch'}
+ 'ALL' => {64 => {'ALL' => 'hue.noarch'}}
},
ambari-log4j => {
- 64 => {'ALL' =>'ambari-log4j'}
+ 'ALL' => {
+ 64 => {
+ 'ALL' => 'ambari-log4j'
+ }
+ }
},
+
httpd => {
- 64 => {'ALL' =>'httpd',
- suse => ['apache2', 'apache2-mod_php5']}
+ 'ALL' => {
+ 64 => {
+ 'ALL' => 'httpd',
+ suse => ['apache2', 'apache2-mod_php5']
+ }
+ }
}
-
}
$repos_paths =
Modified:
incubator/ambari/trunk/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py?rev=1469549&r1=1469548&r2=1469549&view=diff
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
(original)
+++
incubator/ambari/trunk/ambari-agent/src/main/python/ambari_agent/AmbariConfig.py
Thu Apr 18 20:24:42 2013
@@ -84,6 +84,7 @@ rolesToClass = {
'SECONDARY_NAMENODE': 'hdp-hadoop::snamenode',
'JOBTRACKER': 'hdp-hadoop::jobtracker',
'TASKTRACKER': 'hdp-hadoop::tasktracker',
+ 'RESOURCEMANAGER': 'hdp-yarn::resourcemanager',
'HDFS_CLIENT': 'hdp-hadoop::client',
'MAPREDUCE_CLIENT': 'hdp-hadoop::client',
'ZOOKEEPER_SERVER': 'hdp-zookeeper',
@@ -135,6 +136,7 @@ servicesToPidNames = {
'DATANODE': 'hadoop-{USER}-datanode.pid$',
'JOBTRACKER': 'hadoop-{USER}-jobtracker.pid$',
'TASKTRACKER': 'hadoop-{USER}-tasktracker.pid$',
+ 'RESOURCEMANAGER': 'yarn-{USER}-resourcemanager.pid$',
'OOZIE_SERVER': 'oozie.pid',
'ZOOKEEPER_SERVER': 'zookeeper_server.pid',
'TEMPLETON_SERVER': 'templeton.pid',
Modified:
incubator/ambari/trunk/ambari-agent/src/main/python/ambari_agent/manifestGenerator.py
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-agent/src/main/python/ambari_agent/manifestGenerator.py?rev=1469549&r1=1469548&r2=1469549&view=diff
==============================================================================
---
incubator/ambari/trunk/ambari-agent/src/main/python/ambari_agent/manifestGenerator.py
(original)
+++
incubator/ambari/trunk/ambari-agent/src/main/python/ambari_agent/manifestGenerator.py
Thu Apr 18 20:24:42 2013
@@ -35,7 +35,8 @@ non_global_configuration_types = ["hdfs-
"hadoop-policy", "mapred-site",
"capacity-scheduler", "hbase-site",
"hbase-policy", "hive-site", "oozie-site",
- "webhcat-site", "hdfs-exclude-file", "hue-site"]
+ "webhcat-site", "hdfs-exclude-file", "hue-site",
+ "yarn-site"]
#read static imports from file and write them to manifest
def writeImports(outputFile, modulesdir, importsList):
Modified:
incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/server/Role.java
URL:
http://svn.apache.org/viewvc/incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/server/Role.java?rev=1469549&r1=1469548&r2=1469549&view=diff
==============================================================================
---
incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/server/Role.java
(original)
+++
incubator/ambari/trunk/ambari-server/src/main/java/org/apache/ambari/server/Role.java
Thu Apr 18 20:24:42 2013
@@ -68,5 +68,6 @@ public enum Role {
MONITOR_WEBSERVER,
DECOMMISSION_DATANODE,
HUE_SERVER,
- AMBARI_SERVER_ACTION
+ AMBARI_SERVER_ACTION,
+ RESOURCEMANAGER
}