Repository: ambari Updated Branches: refs/heads/trunk c465e9e12 -> 03918cf3a
http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py index fc96b93..8b7846c 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py +++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py @@ -20,6 +20,7 @@ limitations under the License. from stacks.utils.RMFTestCase import * import json from mock.mock import MagicMock, patch +from resource_management.libraries.script import Script from resource_management.core import shell from resource_management.core.exceptions import Fail @@ -162,7 +163,7 @@ class TestDatanode(RMFTestCase): hdp_stack_version = self.STACK_VERSION, target = RMFTestCase.TARGET_COMMON_SERVICES ) - self.assert_configure_secured() + self.assert_configure_secured("2.2") self.assertResourceCalled('Directory', '/var/run/hadoop', owner = 'hdfs', group = 'hadoop', @@ -180,7 +181,7 @@ class TestDatanode(RMFTestCase): action = ['delete'], not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1', ) - self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode', + self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode', environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'}, not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1', ) @@ -203,7 +204,7 @@ class 
TestDatanode(RMFTestCase): hdp_stack_version = self.STACK_VERSION, target = RMFTestCase.TARGET_COMMON_SERVICES ) - self.assert_configure_secured() + self.assert_configure_secured("2.2") self.assertResourceCalled('Directory', '/var/run/hadoop', owner = 'hdfs', group = 'hadoop', @@ -221,7 +222,7 @@ class TestDatanode(RMFTestCase): action = ['delete'], not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1', ) - self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode'", + self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode'", environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'}, not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1', ) @@ -295,7 +296,7 @@ class TestDatanode(RMFTestCase): action = ['delete'], not_if='ls /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1', ) - self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode', + self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode', environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'}, not_if = None, ) @@ -339,7 +340,7 @@ class TestDatanode(RMFTestCase): action = ['delete'], not_if='ls 
/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid` >/dev/null 2>&1', ) - self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode'", + self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ; /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode'", environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'}, not_if = None, ) @@ -394,7 +395,11 @@ class TestDatanode(RMFTestCase): cd_access='a' ) - def assert_configure_secured(self): + def assert_configure_secured(self, stackVersion=STACK_VERSION): + conf_dir = '/etc/hadoop/conf' + if stackVersion != self.STACK_VERSION: + conf_dir = '/usr/hdp/current/hadoop-client/conf' + self.assertResourceCalled('Directory', '/etc/security/limits.d', owner = 'root', group = 'root', @@ -409,19 +414,20 @@ class TestDatanode(RMFTestCase): self.assertResourceCalled('XmlConfig', 'hdfs-site.xml', owner = 'hdfs', group = 'hadoop', - conf_dir = '/etc/hadoop/conf', + conf_dir = conf_dir, configurations = self.getConfig()['configurations']['hdfs-site'], configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site'] ) + self.assertResourceCalled('XmlConfig', 'core-site.xml', owner = 'hdfs', group = 'hadoop', - conf_dir = '/etc/hadoop/conf', + conf_dir = conf_dir, configurations = self.getConfig()['configurations']['core-site'], configuration_attributes = self.getConfig()['configuration_attributes']['core-site'], mode = 0644 ) - self.assertResourceCalled('File', '/etc/hadoop/conf/slaves', + self.assertResourceCalled('File', conf_dir + '/slaves', content = Template('slaves.j2'), owner = 'root', ) 
http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py index 66744eb..c08332a 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py +++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py @@ -634,7 +634,7 @@ class TestHiveServer(RMFTestCase): call_mocks = [(0,"hive-server2 - 2.2.0.0-2041"), (0,"hive-server2 - 2.2.0.0-2041")] ) - self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service hiveserver2 --deregister 2.2.0.0-2041', + self.assertResourceCalled('Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service hiveserver2 --deregister 2.2.0.0-2041', path=['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'], tries=1, user='hive') http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py index 089008c..1312239 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py +++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py @@ -148,7 +148,7 @@ class TestOozieClient(RMFTestCase): hdp_stack_version = self.STACK_VERSION, target = RMFTestCase.TARGET_COMMON_SERVICES ) - self.assertResourceCalled('Directory', '/etc/oozie/conf', + self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-client/conf', owner = 'oozie', group = 'hadoop', recursive = True @@ -157,39 +157,39 @@ class TestOozieClient(RMFTestCase): owner = 'oozie', 
group = 'hadoop', mode = 0664, - conf_dir = '/etc/oozie/conf', + conf_dir = '/usr/hdp/current/oozie-client/conf', configurations = self.getConfig()['configurations']['oozie-site'], configuration_attributes = self.getConfig()['configuration_attributes']['oozie-site'] ) - self.assertResourceCalled('File', '/etc/oozie/conf/oozie-env.sh', + self.assertResourceCalled('File', '/usr/hdp/current/oozie-client/conf/oozie-env.sh', owner = 'oozie', content = InlineTemplate(self.getConfig()['configurations']['oozie-env']['content']) ) - self.assertResourceCalled('File', '/etc/oozie/conf/oozie-log4j.properties', + self.assertResourceCalled('File', '/usr/hdp/current/oozie-client/conf/oozie-log4j.properties', owner = 'oozie', group = 'hadoop', mode = 0644, content = 'log4jproperties\nline2' ) - self.assertResourceCalled('File', '/etc/oozie/conf/adminusers.txt', + self.assertResourceCalled('File', '/usr/hdp/current/oozie-client/conf/adminusers.txt', content = Template('adminusers.txt.j2'), owner = 'oozie', group = 'hadoop', mode=0644, ) - self.assertResourceCalled('File', '/etc/oozie/conf/hadoop-config.xml', + self.assertResourceCalled('File', '/usr/hdp/current/oozie-client/conf/hadoop-config.xml', owner = 'oozie', group = 'hadoop', ) - self.assertResourceCalled('File', '/etc/oozie/conf/oozie-default.xml', + self.assertResourceCalled('File', '/usr/hdp/current/oozie-client/conf/oozie-default.xml', owner = 'oozie', group = 'hadoop', ) - self.assertResourceCalled('Directory', '/etc/oozie/conf/action-conf', + self.assertResourceCalled('Directory', '/usr/hdp/current/oozie-client/conf/action-conf', owner = 'oozie', group = 'hadoop', ) - self.assertResourceCalled('File', '/etc/oozie/conf/action-conf/hive.xml', + self.assertResourceCalled('File', '/usr/hdp/current/oozie-client/conf/action-conf/hive.xml', owner = 'oozie', group = 'hadoop', ) http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py 
---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py index 28e2306..ff6cc34 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py +++ b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py @@ -109,23 +109,23 @@ class TestPigClient(RMFTestCase): target = RMFTestCase.TARGET_COMMON_SERVICES ) - self.assertResourceCalled('Directory', '/etc/pig/conf', + self.assertResourceCalled('Directory', '/usr/hdp/current/pig-client/conf', recursive = True, owner = 'hdfs', group = 'hadoop' ) - self.assertResourceCalled('File', '/etc/pig/conf/pig-env.sh', + self.assertResourceCalled('File', '/usr/hdp/current/pig-client/conf/pig-env.sh', owner = 'hdfs', mode=0755, content = InlineTemplate(self.getConfig()['configurations']['pig-env']['content']) ) - self.assertResourceCalled('File', '/etc/pig/conf/pig.properties', + self.assertResourceCalled('File', '/usr/hdp/current/pig-client/conf/pig.properties', owner = 'hdfs', group = 'hadoop', mode = 0644, content = 'pigproperties\nline2' ) - self.assertResourceCalled('File', '/etc/pig/conf/log4j.properties', + self.assertResourceCalled('File', '/usr/hdp/current/pig-client/conf/log4j.properties', owner = 'hdfs', group = 'hadoop', mode = 0644, http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_service_check.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_service_check.py index a936b1d..01941b8 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_service_check.py +++ b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_service_check.py @@ 
-76,7 +76,7 @@ class TestServiceCheck(RMFTestCase): content = StaticFile('zkSmoke.sh'), mode = 0755, ) - self.assertResourceCalled('Execute', '/tmp/zkSmoke.sh /usr/hdp/current/zookeeper-client/bin/zkCli.sh ambari-qa /etc/zookeeper/conf 2181 False /usr/bin/kinit no_keytab no_principal', + self.assertResourceCalled('Execute', '/tmp/zkSmoke.sh /usr/hdp/current/zookeeper-client/bin/zkCli.sh ambari-qa /usr/hdp/current/zookeeper-client/conf 2181 False /usr/bin/kinit no_keytab no_principal', logoutput = True, path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'], tries = 3, http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py index 84a72b4..fa3f717 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py +++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py @@ -35,6 +35,7 @@ class TestHookAfterInstall(RMFTestCase): group = 'hadoop', conf_dir = '/etc/hadoop/conf', configurations = self.getConfig()['configurations']['core-site'], - configuration_attributes = self.getConfig()['configuration_attributes']['core-site'] - ) + configuration_attributes = self.getConfig()['configuration_attributes']['core-site'], + only_if="ls /etc/hadoop/conf") + self.assertNoMoreResources() \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py 
b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py index 6906a97..5c8154e 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py +++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py @@ -24,7 +24,16 @@ from resource_management import Hook @patch.object(Hook, "run_custom_hook", new = MagicMock()) class TestHookBeforeInstall(RMFTestCase): - def test_hook_default(self): + @patch("os.path.exists") + def test_hook_default(self, os_path_exists_mock): + + def side_effect(path): + if path == "/etc/hadoop/conf": + return True + return False + + os_path_exists_mock.side_effect = side_effect + self.executeScript("2.0.6/hooks/before-ANY/scripts/hook.py", classname="BeforeAnyHook", command="hook", http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_base.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_base.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_base.py index f7b634c..8d82f2a 100644 --- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_base.py +++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_base.py @@ -28,7 +28,7 @@ class TestStormBase(RMFTestCase): COMMON_SERVICES_PACKAGE_DIR = "STORM/0.9.1.2.1/package" STACK_VERSION = "2.1" - def assert_configure_default(self): + def assert_configure_default(self, confDir="/etc/storm/conf"): import params self.assertResourceCalled('Directory', '/var/log/storm', owner = 'storm', @@ -48,28 +48,28 @@ class TestStormBase(RMFTestCase): recursive = True, cd_access='a' ) - self.assertResourceCalled('Directory', '/etc/storm/conf', + self.assertResourceCalled('Directory', confDir, group = 'hadoop', recursive = True, cd_access='a' ) - self.assertResourceCalled('File', '/etc/storm/conf/config.yaml', + self.assertResourceCalled('File', confDir + 
'/config.yaml', owner = 'storm', content = Template('config.yaml.j2'), group = 'hadoop', ) - storm_yarn_content = self.call_storm_template_and_assert() + storm_yarn_content = self.call_storm_template_and_assert(confDir=confDir) self.assertTrue(storm_yarn_content.find('_JAAS_PLACEHOLDER') == -1, 'Placeholder have to be substituted') - self.assertResourceCalled('File', '/etc/storm/conf/storm-env.sh', + self.assertResourceCalled('File', confDir + '/storm-env.sh', owner = 'storm', content = InlineTemplate(self.getConfig()['configurations']['storm-env']['content']) ) return storm_yarn_content - def assert_configure_secured(self): + def assert_configure_secured(self, confDir='/etc/storm/conf'): import params self.assertResourceCalled('Directory', '/var/log/storm', owner = 'storm', @@ -89,36 +89,36 @@ class TestStormBase(RMFTestCase): recursive = True, cd_access='a' ) - self.assertResourceCalled('Directory', '/etc/storm/conf', + self.assertResourceCalled('Directory', confDir, group = 'hadoop', recursive = True, cd_access='a' ) - self.assertResourceCalled('File', '/etc/storm/conf/config.yaml', + self.assertResourceCalled('File', confDir + '/config.yaml', owner = 'storm', content = Template('config.yaml.j2'), group = 'hadoop', ) - storm_yarn_content = self.call_storm_template_and_assert() + storm_yarn_content = self.call_storm_template_and_assert(confDir=confDir) self.assertTrue(storm_yarn_content.find('_JAAS_PLACEHOLDER') == -1, 'Placeholder have to be substituted') - self.assertResourceCalled('File', '/etc/storm/conf/storm-env.sh', + self.assertResourceCalled('File', confDir + '/storm-env.sh', owner = 'storm', content = InlineTemplate(self.getConfig()['configurations']['storm-env']['content']) ) - self.assertResourceCalled('TemplateConfig', '/etc/storm/conf/storm_jaas.conf', + self.assertResourceCalled('TemplateConfig', confDir + '/storm_jaas.conf', owner = 'storm', ) return storm_yarn_content - def call_storm_template_and_assert(self): + def 
call_storm_template_and_assert(self, confDir="/etc/storm/conf"): import yaml_utils with RMFTestCase.env as env: storm_yarn_temlate = yaml_utils.yaml_config_template(self.getConfig()['configurations']['storm-site']) - self.assertResourceCalled('File', '/etc/storm/conf/storm.yaml', + self.assertResourceCalled('File', confDir + '/storm.yaml', owner = 'storm', content= storm_yarn_temlate, group = 'hadoop' http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_jaas_configuration.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_jaas_configuration.py b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_jaas_configuration.py index 047bfaf..74191c1 100644 --- a/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_jaas_configuration.py +++ b/ambari-server/src/test/python/stacks/2.1/STORM/test_storm_jaas_configuration.py @@ -70,7 +70,7 @@ class TestStormJaasConfiguration(TestStormBase): self.assert_configure_secured() def assert_configure_default(self): - storm_yarn_content = super(TestStormJaasConfiguration, self).assert_configure_default() + storm_yarn_content = super(TestStormJaasConfiguration, self).assert_configure_default(confDir="/usr/hdp/current/storm-nimbus/conf") self.assertTrue(storm_yarn_content.find('_JAAS_PLACEHOLDER') == -1, 'Placeholder have to be substituted') @@ -80,11 +80,11 @@ class TestStormJaasConfiguration(TestStormBase): def assert_configure_secured(self): - storm_yarn_content = super(TestStormJaasConfiguration, self).assert_configure_secured() - self.assertResourceCalled('TemplateConfig', '/etc/storm/conf/client_jaas.conf', + storm_yarn_content = super(TestStormJaasConfiguration, self).assert_configure_secured(confDir="/usr/hdp/current/storm-nimbus/conf") + self.assertResourceCalled('TemplateConfig', '/usr/hdp/current/storm-nimbus/conf/client_jaas.conf', owner = 'storm', ) - 
self.assertResourceCalled('File', '/etc/storm/conf/worker-launcher.cfg', + self.assertResourceCalled('File', '/usr/hdp/current/storm-nimbus/conf/worker-launcher.cfg', owner = 'root', content = Template('worker-launcher.cfg.j2', min_user_ruid = 500), group = 'hadoop', http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.1/configs/secured-storm-start.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.1/configs/secured-storm-start.json b/ambari-server/src/test/python/stacks/2.1/configs/secured-storm-start.json index a7834c2..bc313cf 100644 --- a/ambari-server/src/test/python/stacks/2.1/configs/secured-storm-start.json +++ b/ambari-server/src/test/python/stacks/2.1/configs/secured-storm-start.json @@ -41,7 +41,7 @@ "repo_info": "[{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.2.0.0\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.2\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.2.0.0\",\"latestBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.2.0.0\"},{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.20\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"latestBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\"}]", "group_list": "[\"hadoop\",\"users\"]", "package_list": "[{\"name\":\"storm_2_2_0_0_*\"}]", - "stack_version": "2.2", + "stack_version": "2.2", "stack_name": "HDP", "db_name": "ambari", "ambari_db_rca_driver": "org.postgresql.Driver", http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py 
---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py b/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py index 4519e4e..58325e6 100644 --- a/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py +++ b/ambari-server/src/test/python/stacks/2.2/KAFKA/test_kafka_broker.py @@ -50,7 +50,7 @@ class TestKafkaBroker(RMFTestCase): cd_access = 'a' ) - self.assertResourceCalled('Directory', '/etc/kafka/conf', + self.assertResourceCalled('Directory', '/usr/hdp/current/kafka-broker/conf', owner = 'kafka', group = 'hadoop', recursive = True, @@ -86,7 +86,7 @@ class TestKafkaBroker(RMFTestCase): cd_access = 'a' ) - self.assertResourceCalled('Directory', '/etc/kafka/conf', + self.assertResourceCalled('Directory', '/usr/hdp/current/kafka-broker/conf', owner = 'kafka', group = 'hadoop', recursive = True, http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py b/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py index 6060c16..f2e4e50 100644 --- a/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py +++ b/ambari-server/src/test/python/stacks/2.2/KNOX/test_knox_gateway.py @@ -50,7 +50,7 @@ class TestKnoxGateway(RMFTestCase): group = 'knox', recursive = True ) - self.assertResourceCalled('Directory', '/etc/knox/conf', + self.assertResourceCalled('Directory', '/usr/hdp/current/knox-server/conf', owner = 'knox', group = 'knox', recursive = True @@ -59,18 +59,18 @@ class TestKnoxGateway(RMFTestCase): self.assertResourceCalled('XmlConfig', 'gateway-site.xml', owner = 'knox', group = 'knox', - conf_dir = '/etc/knox/conf', + conf_dir = '/usr/hdp/current/knox-server/conf', configurations = 
self.getConfig()['configurations']['gateway-site'], configuration_attributes = self.getConfig()['configuration_attributes']['gateway-site'] ) - self.assertResourceCalled('File', '/etc/knox/conf/gateway-log4j.properties', + self.assertResourceCalled('File', '/usr/hdp/current/knox-server/conf/gateway-log4j.properties', mode=0644, group='knox', owner = 'knox', content = self.getConfig()['configurations']['gateway-log4j']['content'] ) - self.assertResourceCalled('File', '/etc/knox/conf/topologies/default.xml', + self.assertResourceCalled('File', '/usr/hdp/current/knox-server/conf/topologies/default.xml', group='knox', owner = 'knox', content = InlineTemplate(self.getConfig()['configurations']['topology']['content']) @@ -81,7 +81,7 @@ class TestKnoxGateway(RMFTestCase): '/var/lib/knox/data', '/var/log/knox', '/var/run/knox', - '/etc/knox/conf'), + '/usr/hdp/current/knox-server/conf'), sudo = True, ) self.assertResourceCalled('Execute', '/usr/hdp/current/knox-server/bin/knoxcli.sh create-master --master sa', @@ -94,13 +94,13 @@ class TestKnoxGateway(RMFTestCase): not_if = "ambari-sudo.sh su knox -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]test -f /var/lib/knox/data/security/keystores/gateway.jks'", user = 'knox', ) - self.assertResourceCalled('File', '/etc/knox/conf/ldap-log4j.properties', + self.assertResourceCalled('File', '/usr/hdp/current/knox-server/conf/ldap-log4j.properties', content = '\n # Licensed to the Apache Software Foundation (ASF) under one\n # or more contributor license agreements. See the NOTICE file\n # distributed with this work for additional information\n # regarding copyright ownership. The ASF licenses this file\n # to you under the Apache License, Version 2.0 (the\n # "License"); you may not use this file except in compliance\n # with the License. 
You may obtain a copy of the License at\n #\n # http://www.apache.org/licenses/LICENSE-2.0\n #\n # Unless required by applicable law or agreed to in writing, software\n # distributed under the License is distributed on an "AS IS" BASIS,\n # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n # See the License for the specific language governing permissions and\n # limitations under the License.\n #testing\n\n app.log.dir=${launcher.dir}/. ./logs\n app.log.file=${launcher.name}.log\n\n log4j.rootLogger=ERROR, drfa\n log4j.logger.org.apache.directory.server.ldap.LdapServer=INFO\n log4j.logger.org.apache.directory=WARN\n\n log4j.appender.stdout=org.apache.log4j.ConsoleAppender\n log4j.appender.stdout.layout=org.apache.log4j.PatternLayout\n log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n\n\n log4j.appender.drfa=org.apache.log4j.DailyRollingFileAppender\n log4j.appender.drfa.File=${app.log.dir}/${app.log.file}\n log4j.appender.drfa.DatePattern=.yyyy-MM-dd\n log4j.appender.drfa.layout=org.apache.log4j.PatternLayout\n log4j.appender.drfa.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n', owner = 'knox', group = 'knox', mode = 0644, ) - self.assertResourceCalled('File', '/etc/knox/conf/users.ldif', + self.assertResourceCalled('File', '/usr/hdp/current/knox-server/conf/users.ldif', content = '\n # Licensed to the Apache Software Foundation (ASF) under one\n # or more contributor license agreements. See the NOTICE file\n # distributed with this work for additional information\n # regarding copyright ownership. The ASF licenses this file\n # to you under the Apache License, Version 2.0 (the\n # "License"); you may not use this file except in compliance\n # with the License. 
You may obtain a copy of the License at\n #\n # http://www.apache.org/licenses/LICENSE-2.0\n #\n # Unless required by applicable law or agreed to in writing, software\n # distributed under the License is distributed on an "AS IS" BASIS,\n # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n # See the License for the specific language governing permissions and\n # limitations under the License .\n\n version: 1\n\n # Please replace with site specific values\n dn: dc=hadoop,dc=apache,dc=org\n objectclass: organization\n objectclass: dcObject\n o: Hadoop\n dc: hadoop\n\n # Entry for a sample people container\n # Please replace with site specific values\n dn: ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:organizationalUnit\n ou: people\n\n # Entry for a sample end user\n # Please replace with site specific values\n dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:person\n objectclass:organizationalPerson\n objectclass:inetOrgPerson\n cn: Guest\n sn: User\n uid: guest\n userPassword:guest-password\n\n # entry for sample user admin\n dn: uid =admin,ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:person\n objectclass:organizationalPerson\n objectclass:inetOrgPerson\n cn: Admin\n sn: Admin\n uid: admin\n userPassword:admin-password\n\n # entry for sample user sam\n dn: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:person\n objectclass:organizationalPerson\n objectclass:inetOrgPerson\n cn: sam\n sn: sam\n uid: sam\n userPassword:sam-password\n\n # entry for sample user tom\n dn: uid=tom,ou=people,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:person\n objectclass:organizationalPerson\n objectclass:inetOrgPerson\n cn: tom\n sn: tom\n uid: tom\n userPassword:t om-password\n\n # create FIRST Level groups branch\n dn: ou=groups,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass:organizationalUnit\n ou: groups\n description: 
generic groups branch\n\n # create the analyst group under groups\n dn: cn=analyst,ou=groups,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass: groupofnames\n cn: analyst\n description:analyst group\n member: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org\n member: uid=tom,ou=people,dc=hadoop,dc=apache,dc=org\n\n\n # create the scientist group under groups\n dn: cn=scientist,ou=groups,dc=hadoop,dc=apache,dc=org\n objectclass:top\n objectclass: groupofnames\n cn: scientist\n description: scientist group\n member: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org', owner = 'knox', group = 'knox', @@ -294,7 +294,7 @@ class TestKnoxGateway(RMFTestCase): group = 'knox', recursive = True ) - self.assertResourceCalled('Directory', '/etc/knox/conf', + self.assertResourceCalled('Directory', '/usr/hdp/current/knox-server/conf', owner = 'knox', group = 'knox', recursive = True @@ -303,18 +303,18 @@ class TestKnoxGateway(RMFTestCase): self.assertResourceCalled('XmlConfig', 'gateway-site.xml', owner = 'knox', group = 'knox', - conf_dir = '/etc/knox/conf', + conf_dir = '/usr/hdp/current/knox-server/conf', configurations = self.getConfig()['configurations']['gateway-site'], configuration_attributes = self.getConfig()['configuration_attributes']['gateway-site'] ) - self.assertResourceCalled('File', '/etc/knox/conf/gateway-log4j.properties', + self.assertResourceCalled('File', '/usr/hdp/current/knox-server/conf/gateway-log4j.properties', mode=0644, group='knox', owner = 'knox', content = self.getConfig()['configurations']['gateway-log4j']['content'] ) - self.assertResourceCalled('File', '/etc/knox/conf/topologies/default.xml', + self.assertResourceCalled('File', '/usr/hdp/current/knox-server/conf/topologies/default.xml', group='knox', owner = 'knox', content = InlineTemplate(self.getConfig()['configurations']['topology']['content']) @@ -325,7 +325,7 @@ class TestKnoxGateway(RMFTestCase): '/var/lib/knox/data', '/var/log/knox', '/var/run/knox', - '/etc/knox/conf'), + 
'/usr/hdp/current/knox-server/conf'), sudo = True, ) self.assertResourceCalled('Execute', '/usr/hdp/current/knox-server/bin/knoxcli.sh create-master --master sa', @@ -338,13 +338,13 @@ class TestKnoxGateway(RMFTestCase): not_if = "ambari-sudo.sh su knox -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]test -f /var/lib/knox/data/security/keystores/gateway.jks'", user = 'knox', ) - self.assertResourceCalled('File', '/etc/knox/conf/ldap-log4j.properties', + self.assertResourceCalled('File', '/usr/hdp/current/knox-server/conf/ldap-log4j.properties', mode=0644, group='knox', owner = 'knox', content = self.getConfig()['configurations']['ldap-log4j']['content'] ) - self.assertResourceCalled('File', '/etc/knox/conf/users.ldif', + self.assertResourceCalled('File', '/usr/hdp/current/knox-server/conf/users.ldif', mode=0644, group='knox', owner = 'knox', http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py index f930780..dd3f4f2 100644 --- a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py +++ b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py @@ -38,11 +38,11 @@ class TestPigServiceCheck(RMFTestCase): hdp_stack_version=self.STACK_VERSION, target=RMFTestCase.TARGET_COMMON_SERVICES ) - self.assertResourceCalled("ExecuteHadoop", "dfs -rmr pigsmoke.out passwd; hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ", + self.assertResourceCalled("ExecuteHadoop", "dfs -rmr pigsmoke.out passwd; hadoop --config /usr/hdp/current/hadoop-client/conf dfs -put /etc/passwd passwd ", try_sleep=5, tries=3, user="ambari-qa", - conf_dir="/etc/hadoop/conf", + conf_dir="/usr/hdp/current/hadoop-client/conf", security_enabled=True, principal="ambari-qa@EXAMPLE.COM",
keytab="/etc/security/keytabs/smokeuser.headless.keytab", @@ -65,15 +65,15 @@ class TestPigServiceCheck(RMFTestCase): self.assertResourceCalled("ExecuteHadoop", "fs -test -e pigsmoke.out", user="ambari-qa", bin_dir="/usr/hdp/current/hadoop-client/bin", - conf_dir="/etc/hadoop/conf" + conf_dir="/usr/hdp/current/hadoop-client/conf" ) # Specific to HDP 2.2 and kerberized cluster - self.assertResourceCalled("ExecuteHadoop", "dfs -rmr pigsmoke.out passwd; hadoop --config /etc/hadoop/conf dfs -put /etc/passwd passwd ", + self.assertResourceCalled("ExecuteHadoop", "dfs -rmr pigsmoke.out passwd; hadoop --config /usr/hdp/current/hadoop-client/conf dfs -put /etc/passwd passwd ", tries=3, try_sleep=5, user="ambari-qa", - conf_dir="/etc/hadoop/conf", + conf_dir="/usr/hdp/current/hadoop-client/conf", keytab="/etc/security/keytabs/smokeuser.headless.keytab", principal="ambari-qa@EXAMPLE.COM", security_enabled=True, @@ -96,7 +96,7 @@ class TestPigServiceCheck(RMFTestCase): self.assertResourceCalled("ExecuteHadoop", "fs -test -e pigsmoke.out", user="ambari-qa", bin_dir="/usr/hdp/current/hadoop-client/bin", - conf_dir="/etc/hadoop/conf" + conf_dir="/usr/hdp/current/hadoop-client/conf" ) self.assertNoMoreResources() http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_admin.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_admin.py b/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_admin.py index 57674cd..f7334fb 100644 --- a/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_admin.py +++ b/ambari-server/src/test/python/stacks/2.2/RANGER/test_ranger_admin.py @@ -124,10 +124,10 @@ class TestRangerAdmin(RMFTestCase): logoutput = True, environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'}, ) - self.assertResourceCalled('ModifyPropertiesFile', '/etc/ranger/admin/conf/xa_system.properties', +
self.assertResourceCalled('ModifyPropertiesFile', '/usr/hdp/current/ranger-admin/conf/xa_system.properties', properties = self.getConfig()['configurations']['ranger-site'], ) - self.assertResourceCalled('ModifyPropertiesFile', '/etc/ranger/admin/conf/ranger_webserver.properties', + self.assertResourceCalled('ModifyPropertiesFile', '/usr/hdp/current/ranger-admin/conf/ranger_webserver.properties', mode = 0744, properties = self.getConfig()['configurations']['ranger-site'] ) @@ -153,10 +153,10 @@ class TestRangerAdmin(RMFTestCase): logoutput = True, environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'}, ) - self.assertResourceCalled('ModifyPropertiesFile', '/etc/ranger/admin/conf/xa_system.properties', + self.assertResourceCalled('ModifyPropertiesFile', '/usr/hdp/current/ranger-admin/conf/xa_system.properties', properties = self.getConfig()['configurations']['ranger-site'], ) - self.assertResourceCalled('ModifyPropertiesFile', '/etc/ranger/admin/conf/ranger_webserver.properties', + self.assertResourceCalled('ModifyPropertiesFile', '/usr/hdp/current/ranger-admin/conf/ranger_webserver.properties', mode = 0744, properties = self.getConfig()['configurations']['ranger-site'] ) http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py b/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py index 368cdcb..af876b9 100644 --- a/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py +++ b/ambari-server/src/test/python/stacks/2.2/SLIDER/test_slider_client.py @@ -36,17 +36,17 @@ class TestSliderClient(RMFTestCase): ) self.assertResourceCalled('Directory', - '/etc/slider/conf', + '/usr/hdp/current/slider-client/conf', recursive=True ) self.assertResourceCalled('XmlConfig', 'slider-client.xml', - conf_dir='/etc/slider/conf', + 
conf_dir='/usr/hdp/current/slider-client/conf', configurations=self.getConfig()['configurations']['slider-client'] ) - self.assertResourceCalled('File', '/etc/slider/conf/slider-env.sh', + self.assertResourceCalled('File', '/usr/hdp/current/slider-client/conf/slider-env.sh', content = InlineTemplate(self.getConfig()['configurations']['slider-env']['content']), mode = 0755, ) @@ -62,7 +62,7 @@ class TestSliderClient(RMFTestCase): ) self.assertResourceCalled('File', - '/etc/slider/conf/log4j.properties', + '/usr/hdp/current/slider-client/conf/log4j.properties', mode=0644, content='log4jproperties\nline2' ) http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py index 25de85e..60d7924 100644 --- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py +++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py @@ -131,7 +131,7 @@ class TestJobHistoryServer(RMFTestCase): self.assertResourceCalled('HdfsDirectory', '/user/spark', security_enabled = False, keytab = UnknownConfigurationMock(), - conf_dir = '/etc/hadoop/conf', + conf_dir = '/usr/hdp/current/hadoop-client/conf', hdfs_user = 'hdfs', kinit_path_local = '/usr/bin/kinit', mode = 0775, @@ -139,26 +139,26 @@ class TestJobHistoryServer(RMFTestCase): bin_dir = '/usr/hdp/current/hadoop-client/bin', action = ['create'], ) - self.assertResourceCalled('PropertiesFile', '/etc/spark/conf/spark-defaults.conf', + self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf', key_value_delimiter = ' ', properties = self.getConfig()['configurations']['spark-defaults'], ) - self.assertResourceCalled('File', '/etc/spark/conf/spark-env.sh', + 
self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh', content = InlineTemplate(self.getConfig()['configurations']['spark-env']['content']), owner = 'spark', group = 'spark', ) - self.assertResourceCalled('File', '/etc/spark/conf/log4j.properties', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/log4j.properties', content = '\n# Set everything to be logged to the console\nlog4j.rootCategory=INFO, console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n# Settings to quiet third party logs that are too verbose\nlog4j.logger.org.eclipse.jetty=WARN\nlog4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR\nlog4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO\nlog4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO', owner = 'spark', group = 'spark', ) - self.assertResourceCalled('File', '/etc/spark/conf/metrics.properties', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/metrics.properties', content = InlineTemplate(self.getConfig()['configurations']['spark-metrics-properties']['content']), owner = 'spark', group = 'spark', ) - self.assertResourceCalled('File', '/etc/spark/conf/java-opts', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts', content = ' -Dhdp.version=2.3.0.0-1597', owner = 'spark', group = 'spark', @@ -178,7 +178,7 @@ class TestJobHistoryServer(RMFTestCase): self.assertResourceCalled('HdfsDirectory', '/user/spark', security_enabled = True, keytab = UnknownConfigurationMock(), - conf_dir = '/etc/hadoop/conf', + conf_dir = '/usr/hdp/current/hadoop-client/conf', hdfs_user = UnknownConfigurationMock(), kinit_path_local = '/usr/bin/kinit', mode = 0775, @@ -186,26 +186,26 @@ class TestJobHistoryServer(RMFTestCase): 
bin_dir = '/usr/hdp/current/hadoop-client/bin', action = ['create'], ) - self.assertResourceCalled('PropertiesFile', '/etc/spark/conf/spark-defaults.conf', + self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf', key_value_delimiter = ' ', properties = self.getConfig()['configurations']['spark-defaults'], ) - self.assertResourceCalled('File', '/etc/spark/conf/spark-env.sh', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh', content = InlineTemplate(self.getConfig()['configurations']['spark-env']['content']), owner = 'spark', group = 'spark', ) - self.assertResourceCalled('File', '/etc/spark/conf/log4j.properties', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/log4j.properties', content = '\n# Set everything to be logged to the console\nlog4j.rootCategory=INFO, console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n# Settings to quiet third party logs that are too verbose\nlog4j.logger.org.eclipse.jetty=WARN\nlog4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR\nlog4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO\nlog4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO', owner = 'spark', group = 'spark', ) - self.assertResourceCalled('File', '/etc/spark/conf/metrics.properties', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/metrics.properties', content = InlineTemplate(self.getConfig()['configurations']['spark-metrics-properties']['content']), owner = 'spark', group = 'spark', ) - self.assertResourceCalled('File', '/etc/spark/conf/java-opts', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts', content = ' -Dhdp.version=2.3.0.0-1597', owner = 'spark', group = 
'spark', http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py index a1f4662..d27aec2 100644 --- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py +++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_spark_client.py @@ -59,26 +59,26 @@ class TestSparkClient(RMFTestCase): group = 'hadoop', recursive = True, ) - self.assertResourceCalled('PropertiesFile', '/etc/spark/conf/spark-defaults.conf', + self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf', key_value_delimiter = ' ', properties = self.getConfig()['configurations']['spark-defaults'], ) - self.assertResourceCalled('File', '/etc/spark/conf/spark-env.sh', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh', content = InlineTemplate(self.getConfig()['configurations']['spark-env']['content']), owner = 'spark', group = 'spark', ) - self.assertResourceCalled('File', '/etc/spark/conf/log4j.properties', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/log4j.properties', content = '\n# Set everything to be logged to the console\nlog4j.rootCategory=INFO, console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n# Settings to quiet third party logs that are too 
verbose\nlog4j.logger.org.eclipse.jetty=WARN\nlog4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR\nlog4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO\nlog4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO', owner = 'spark', group = 'spark', ) - self.assertResourceCalled('File', '/etc/spark/conf/metrics.properties', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/metrics.properties', content = InlineTemplate(self.getConfig()['configurations']['spark-metrics-properties']['content']), owner = 'spark', group = 'spark', ) - self.assertResourceCalled('File', '/etc/spark/conf/java-opts', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts', content = ' -Dhdp.version=2.3.0.0-1597', owner = 'spark', group = 'spark', @@ -95,26 +95,26 @@ class TestSparkClient(RMFTestCase): group = 'hadoop', recursive = True, ) - self.assertResourceCalled('PropertiesFile', '/etc/spark/conf/spark-defaults.conf', + self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf', key_value_delimiter = ' ', properties = self.getConfig()['configurations']['spark-defaults'], ) - self.assertResourceCalled('File', '/etc/spark/conf/spark-env.sh', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/spark-env.sh', content = InlineTemplate(self.getConfig()['configurations']['spark-env']['content']), owner = 'spark', group = 'spark', ) - self.assertResourceCalled('File', '/etc/spark/conf/log4j.properties', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/log4j.properties', content = '\n# Set everything to be logged to the console\nlog4j.rootCategory=INFO, console\nlog4j.appender.console=org.apache.log4j.ConsoleAppender\nlog4j.appender.console.target=System.err\nlog4j.appender.console.layout=org.apache.log4j.PatternLayout\nlog4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n\n\n# Settings to quiet 
third party logs that are too verbose\nlog4j.logger.org.eclipse.jetty=WARN\nlog4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR\nlog4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO\nlog4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO', owner = 'spark', group = 'spark', ) - self.assertResourceCalled('File', '/etc/spark/conf/metrics.properties', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/metrics.properties', content = InlineTemplate(self.getConfig()['configurations']['spark-metrics-properties']['content']), owner = 'spark', group = 'spark', ) - self.assertResourceCalled('File', '/etc/spark/conf/java-opts', + self.assertResourceCalled('File', '/usr/hdp/current/spark-client/conf/java-opts', content = ' -Dhdp.version=2.3.0.0-1597', owner = 'spark', group = 'spark', http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py index 67cc36e..95c0b03 100644 --- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py +++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py @@ -35,12 +35,12 @@ class TestMahoutClient(RMFTestCase): target = RMFTestCase.TARGET_COMMON_SERVICES ) - self.assertResourceCalled('Directory', '/etc/mahout/conf', + self.assertResourceCalled('Directory', '/usr/hdp/current/mahout-client/conf', owner = 'mahout', group = 'hadoop', recursive = True, ) - self.assertResourceCalled('File', '/etc/mahout/conf/log4j.properties', + self.assertResourceCalled('File', '/usr/hdp/current/mahout-client/conf/log4j.properties', content = self.getConfig()['configurations']['mahout-log4j']['content'], owner = 'mahout', group = 'hadoop', 
http://git-wip-us.apache.org/repos/asf/ambari/blob/03918cf3/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py index 5e09ad2..0d943c4 100644 --- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py +++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py @@ -36,7 +36,7 @@ class TestMahoutClient(RMFTestCase): self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /user/ambari-qa/mahoutsmokeoutput /user/ambari-qa/mahoutsmokeinput', security_enabled = False, keytab = UnknownConfigurationMock(), - conf_dir = '/etc/hadoop/conf', + conf_dir = '/usr/hdp/current/hadoop-client/conf', try_sleep = 5, kinit_path_local = '/usr/bin/kinit', tries = 3, @@ -49,7 +49,7 @@ class TestMahoutClient(RMFTestCase): tries = 3, bin_dir = '/usr/hdp/current/hadoop-client/bin', user = 'ambari-qa', - conf_dir = '/etc/hadoop/conf', + conf_dir = '/usr/hdp/current/hadoop-client/conf', ) self.assertResourceCalled('File', '/tmp/sample-mahout-test.txt', content = 'Test text which will be converted to sequence file.', @@ -60,12 +60,12 @@ class TestMahoutClient(RMFTestCase): tries = 3, bin_dir = '/usr/hdp/current/hadoop-client/bin', user = 'ambari-qa', - conf_dir = '/etc/hadoop/conf', + conf_dir = '/usr/hdp/current/hadoop-client/conf', ) self.assertResourceCalled('Execute', 'mahout seqdirectory --input /user/ambari-qa/mahoutsmokeinput/' 'sample-mahout-test.txt --output /user/ambari-qa/mahoutsmokeoutput/ ' '--charset utf-8', - environment = {'HADOOP_CONF_DIR': '/etc/hadoop/conf', + environment = {'HADOOP_CONF_DIR': '/usr/hdp/current/hadoop-client/conf', 'HADOOP_HOME': '/usr/hdp/current/hadoop-client', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'MAHOUT_HOME': '/usr/hdp/current/mahout-client'}, 
@@ -79,7 +79,7 @@ class TestMahoutClient(RMFTestCase): tries = 10, bin_dir = '/usr/hdp/current/hadoop-client/bin', user = 'ambari-qa', - conf_dir = '/etc/hadoop/conf', + conf_dir = '/usr/hdp/current/hadoop-client/conf', ) self.assertNoMoreResources()
