Repository: ambari Updated Branches: refs/heads/trunk 46176c9aa -> 7cb18c22b
AMBARI-15712. Flume Handler Start fails while installing without HDFS (aonishuk) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7cb18c22 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7cb18c22 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7cb18c22 Branch: refs/heads/trunk Commit: 7cb18c22b85a196a30ed941cf34f63121fdd639f Parents: 46176c9 Author: Andrew Onishuk <[email protected]> Authored: Tue Apr 5 15:19:58 2016 +0300 Committer: Andrew Onishuk <[email protected]> Committed: Tue Apr 5 15:19:58 2016 +0300 ---------------------------------------------------------------------- .../HDP/2.0.6/hooks/before-START/scripts/rack_awareness.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/ambari/blob/7cb18c22/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/rack_awareness.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/rack_awareness.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/rack_awareness.py index 7be1561..548f051 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/rack_awareness.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/rack_awareness.py @@ -25,6 +25,7 @@ from resource_management.libraries.functions import format def create_topology_mapping(): import params + File(params.net_topology_mapping_data_file_path, content=Template("topology_mappings.data.j2"), owner=params.hdfs_user, @@ -40,5 +41,7 @@ def create_topology_script(): only_if=format("test -d {net_topology_script_dir}")) def create_topology_script_and_mapping(): - create_topology_mapping() - create_topology_script() + import params + if params.has_hadoop_env: + create_topology_mapping() + create_topology_script()
