Repository: ambari
Updated Branches:
  refs/heads/trunk 335ef6e9a -> 64fc477df


AMBARI-18237. Certain configuration files cannot be modified through the Ambari 
API. (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/64fc477d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/64fc477d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/64fc477d

Branch: refs/heads/trunk
Commit: 64fc477df3305a5c09f6114c9b31b06dde600dc4
Parents: 335ef6e
Author: Andrew Onishuk <aonis...@hortonworks.com>
Authored: Mon Sep 19 16:50:20 2016 +0300
Committer: Andrew Onishuk <aonis...@hortonworks.com>
Committed: Mon Sep 19 16:50:20 2016 +0300

----------------------------------------------------------------------
 .../src/test/python/stacks/2.0.6/configs/default.json        | 3 +++
 .../src/test/python/stacks/2.0.6/configs/secured.json        | 3 +++
 .../stacks/2.0.6/hooks/before-START/test_before_start.py     | 8 ++++----
 3 files changed, 10 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/64fc477d/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json 
b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
index 24493c9..f54b645 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
@@ -939,6 +939,9 @@
         },
         "ams-grafana-ini": {
             "content": "\n"
+        },
+        "hadoop-metrics2.properties": {
+            "content": "# Licensed to the Apache Software Foundation (ASF) 
under one or more\r\n# contributor license agreements. See the NOTICE file 
distributed with\r\n# this work for additional information regarding copyright 
ownership.\r\n# The ASF licenses this file to You under the Apache License, 
Version 2.0\r\n# (the \"License\"); you may not use this file except in 
compliance with\r\n# the License. You may obtain a copy of the License 
at\r\n#\r\n# http:\/\/www.apache.org\/licenses\/LICENSE-2.0\r\n#\r\n# Unless 
required by applicable law or agreed to in writing, software\r\n# distributed 
under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES 
OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for 
the specific language governing permissions and\r\n# limitations under the 
License.\r\n\r\n# syntax: [prefix].[source|sink|jmx].[instance].[options]\r\n# 
See package.html for org.apache.hadoop.metrics2 for details\r\n\r\n{% if 
has_gang
 lia_server 
%}\r\n*.period=60\r\n\r\n*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31\r\n*.sink.ganglia.period=10\r\n\r\n#
 default for supportsparse is 
false\r\n*.sink.ganglia.supportsparse=true\r\n\r\n.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both\r\n.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40\r\n\r\n#
 Hook up to the 
server\r\nnamenode.sink.ganglia.servers={{ganglia_server_host}}:8661\r\ndatanode.sink.ganglia.servers={{ganglia_server_host}}:8659\r\njobtracker.sink.ganglia.servers={{ganglia_server_host}}:8662\r\ntasktracker.sink.ganglia.servers={{ganglia_server_host}}:8658\r\nmaptask.sink.ganglia.servers={{ganglia_server_host}}:8660\r\nreducetask.sink.ganglia.servers={{ganglia_server_host}}:8660\r\nresourcemanager.sink.ganglia.servers={{ganglia_server_host}}:8664\r\nnodemanager.sink.ganglia.servers={{ganglia_server_host}}:8657\r\nhistoryserver.sink.ganglia.servers={{ganglia_server_host}}:8666\r\njo
 
urnalnode.sink.ganglia.servers={{ganglia_server_host}}:8654\r\nnimbus.sink.ganglia.servers={{ganglia_server_host}}:8649\r\nsupervisor.sink.ganglia.servers={{ganglia_server_host}}:8650\r\n\r\nresourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue\r\n\r\n{%
 endif %}\r\n\r\n{% if has_metric_collector 
%}\r\n\r\n*.period={{metrics_collection_period}}\r\n*.sink.timeline.plugin.urls=file:\/\/\/usr\/lib\/ambari-metrics-hadoop-sink\/ambari-metrics-hadoop-sink.jar\r\n*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink\r\n*.sink.timeline.period={{metrics_collection_period}}\r\n*.sink.timeline.sendInterval={{metrics_report_interval}}000\r\n*.sink.timeline.slave.host.name={{hostname}}\r\n*.sink.timeline.zookeeper.quorum={{zookeeper_quorum}}\r\n*.sink.timeline.protocol={{metric_collector_protocol}}\r\n*.sink.timeline.port={{metric_collector_port}}\r\n\r\n#
 HTTPS properties\r\n*.sink.timeline.truststore.path = 
{{metric_truststore_path}}\r\n*.sink.timeline.trusts
 tore.type = {{metric_truststore_type}}\r\n*.sink.timeline.truststore.password 
= 
{{metric_truststore_password}}\r\n\r\ndatanode.sink.timeline.collector={{metric_collector_hosts}}\r\nnamenode.sink.timeline.collector={{metric_collector_hosts}}\r\nresourcemanager.sink.timeline.collector={{metric_collector_hosts}}\r\nnodemanager.sink.timeline.collector={{metric_collector_hosts}}\r\njobhistoryserver.sink.timeline.collector={{metric_collector_hosts}}\r\njournalnode.sink.timeline.collector={{metric_collector_hosts}}\r\nmaptask.sink.timeline.collector={{metric_collector_hosts}}\r\nreducetask.sink.timeline.collector={{metric_collector_hosts}}\r\napplicationhistoryserver.sink.timeline.collector={{metric_collector_hosts}}\r\n\r\nresourcemanager.sink.timeline.tagsForPrefix.yarn=Queue\r\n\r\n{%
 if is_nn_client_port_configured %}\r\n# Namenode rpc ports 
customization\r\nnamenode.sink.timeline.metric.rpc.client.port={{nn_rpc_client_port}}\r\n{%
 endif %}\r\n{% if is_nn_dn_port_configured %}\r\nnamen
 ode.sink.timeline.metric.rpc.datanode.port={{nn_rpc_dn_port}}\r\n{% endif 
%}\r\n{% if is_nn_healthcheck_port_configured 
%}\r\nnamenode.sink.timeline.metric.rpc.healthcheck.port={{nn_rpc_healthcheck_port}}\r\n{%
 endif %}\r\n\r\n{% endif %}"
         }
     },
     "configuration_attributes": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/64fc477d/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json 
b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
index 3dcf1e9..890b9f1 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
@@ -827,6 +827,9 @@
             "SSL_TRUSTSTORE_FILE_PATH": 
"/etc/hadoop/conf/ranger-plugin-truststore.jks", 
             "REPOSITORY_CONFIG_PASSWORD": "hadoop", 
             "XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
+        },
+        "hadoop-metrics2.properties": {
+            "content": "# Licensed to the Apache Software Foundation (ASF) 
under one or more\r\n# contributor license agreements. See the NOTICE file 
distributed with\r\n# this work for additional information regarding copyright 
ownership.\r\n# The ASF licenses this file to You under the Apache License, 
Version 2.0\r\n# (the \"License\"); you may not use this file except in 
compliance with\r\n# the License. You may obtain a copy of the License 
at\r\n#\r\n# http:\/\/www.apache.org\/licenses\/LICENSE-2.0\r\n#\r\n# Unless 
required by applicable law or agreed to in writing, software\r\n# distributed 
under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES 
OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for 
the specific language governing permissions and\r\n# limitations under the 
License.\r\n\r\n# syntax: [prefix].[source|sink|jmx].[instance].[options]\r\n# 
See package.html for org.apache.hadoop.metrics2 for details\r\n\r\n{% if 
has_gang
 lia_server 
%}\r\n*.period=60\r\n\r\n*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31\r\n*.sink.ganglia.period=10\r\n\r\n#
 default for supportsparse is 
false\r\n*.sink.ganglia.supportsparse=true\r\n\r\n.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both\r\n.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40\r\n\r\n#
 Hook up to the 
server\r\nnamenode.sink.ganglia.servers={{ganglia_server_host}}:8661\r\ndatanode.sink.ganglia.servers={{ganglia_server_host}}:8659\r\njobtracker.sink.ganglia.servers={{ganglia_server_host}}:8662\r\ntasktracker.sink.ganglia.servers={{ganglia_server_host}}:8658\r\nmaptask.sink.ganglia.servers={{ganglia_server_host}}:8660\r\nreducetask.sink.ganglia.servers={{ganglia_server_host}}:8660\r\nresourcemanager.sink.ganglia.servers={{ganglia_server_host}}:8664\r\nnodemanager.sink.ganglia.servers={{ganglia_server_host}}:8657\r\nhistoryserver.sink.ganglia.servers={{ganglia_server_host}}:8666\r\njo
 
urnalnode.sink.ganglia.servers={{ganglia_server_host}}:8654\r\nnimbus.sink.ganglia.servers={{ganglia_server_host}}:8649\r\nsupervisor.sink.ganglia.servers={{ganglia_server_host}}:8650\r\n\r\nresourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue\r\n\r\n{%
 endif %}\r\n\r\n{% if has_metric_collector 
%}\r\n\r\n*.period={{metrics_collection_period}}\r\n*.sink.timeline.plugin.urls=file:\/\/\/usr\/lib\/ambari-metrics-hadoop-sink\/ambari-metrics-hadoop-sink.jar\r\n*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink\r\n*.sink.timeline.period={{metrics_collection_period}}\r\n*.sink.timeline.sendInterval={{metrics_report_interval}}000\r\n*.sink.timeline.slave.host.name={{hostname}}\r\n*.sink.timeline.zookeeper.quorum={{zookeeper_quorum}}\r\n*.sink.timeline.protocol={{metric_collector_protocol}}\r\n*.sink.timeline.port={{metric_collector_port}}\r\n\r\n#
 HTTPS properties\r\n*.sink.timeline.truststore.path = 
{{metric_truststore_path}}\r\n*.sink.timeline.trusts
 tore.type = {{metric_truststore_type}}\r\n*.sink.timeline.truststore.password 
= 
{{metric_truststore_password}}\r\n\r\ndatanode.sink.timeline.collector={{metric_collector_hosts}}\r\nnamenode.sink.timeline.collector={{metric_collector_hosts}}\r\nresourcemanager.sink.timeline.collector={{metric_collector_hosts}}\r\nnodemanager.sink.timeline.collector={{metric_collector_hosts}}\r\njobhistoryserver.sink.timeline.collector={{metric_collector_hosts}}\r\njournalnode.sink.timeline.collector={{metric_collector_hosts}}\r\nmaptask.sink.timeline.collector={{metric_collector_hosts}}\r\nreducetask.sink.timeline.collector={{metric_collector_hosts}}\r\napplicationhistoryserver.sink.timeline.collector={{metric_collector_hosts}}\r\n\r\nresourcemanager.sink.timeline.tagsForPrefix.yarn=Queue\r\n\r\n{%
 if is_nn_client_port_configured %}\r\n# Namenode rpc ports 
customization\r\nnamenode.sink.timeline.metric.rpc.client.port={{nn_rpc_client_port}}\r\n{%
 endif %}\r\n{% if is_nn_dn_port_configured %}\r\nnamen
 ode.sink.timeline.metric.rpc.datanode.port={{nn_rpc_dn_port}}\r\n{% endif 
%}\r\n{% if is_nn_healthcheck_port_configured 
%}\r\nnamenode.sink.timeline.metric.rpc.healthcheck.port={{nn_rpc_healthcheck_port}}\r\n{%
 endif %}\r\n\r\n{% endif %}"
         }
     },
     "configuration_attributes": {

http://git-wip-us.apache.org/repos/asf/ambari/blob/64fc477d/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
 
b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
index c0f279f..6e5561a 100644
--- 
a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
+++ 
b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
@@ -72,7 +72,7 @@ class TestHookBeforeStart(RMFTestCase):
                               
content='log4jproperties\nline2log4jproperties\nline2'
                               )
     self.assertResourceCalled('File', 
'/etc/hadoop/conf/hadoop-metrics2.properties',
-                              content = 
Template('hadoop-metrics2.properties.j2'),
+                              content = 
InlineTemplate(self.getConfig()['configurations']['hadoop-metrics2.properties']['content']),
                               group='hadoop',
                               owner = 'hdfs',
                               )
@@ -146,7 +146,7 @@ class TestHookBeforeStart(RMFTestCase):
                               
content='log4jproperties\nline2log4jproperties\nline2'
                               )
     self.assertResourceCalled('File', 
'/etc/hadoop/conf/hadoop-metrics2.properties',
-                              content = 
Template('hadoop-metrics2.properties.j2'),
+                              content = 
InlineTemplate(self.getConfig()['configurations']['hadoop-metrics2.properties']['content']),
                               group='hadoop',
                               owner = 'hdfs',
                               )
@@ -225,7 +225,7 @@ class TestHookBeforeStart(RMFTestCase):
                               
content='log4jproperties\nline2log4jproperties\nline2'
     )
     self.assertResourceCalled('File', 
'/etc/hadoop/conf/hadoop-metrics2.properties',
-                              content = 
Template('hadoop-metrics2.properties.j2'),
+                              content = 
InlineTemplate(self.getConfig()['configurations']['hadoop-metrics2.properties']['content']),
                               group='hadoop',
                               owner = 'hdfs',
                               )
@@ -306,7 +306,7 @@ class TestHookBeforeStart(RMFTestCase):
                               
content='log4jproperties\nline2log4jproperties\nline2'
     )
     self.assertResourceCalled('File', 
'/etc/hadoop/conf/hadoop-metrics2.properties',
-                              content = 
Template('hadoop-metrics2.properties.j2'),
+                              content = 
InlineTemplate(self.getConfig()['configurations']['hadoop-metrics2.properties']['content']),
                               group='hadoop',
                               owner = 'hdfs',
                               )

Reply via email to