This is an automated email from the ASF dual-hosted git repository.

jaimin pushed a commit to branch branch-2.7
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/branch-2.7 by this push:
     new 7a5b350  AMBARI-24472. HDFS and YARN Grafana dashboards for clusters 
with externalized HDFS do not show graphs. (jaimin) (#2074)
7a5b350 is described below

commit 7a5b3506e15f7e966ebe840c99bc8f46f7887289
Author: Jetly <[email protected]>
AuthorDate: Tue Aug 14 13:12:53 2018 -0700

    AMBARI-24472. HDFS and YARN Grafana dashboards for clusters with 
externalized HDFS do not show graphs. (jaimin) (#2074)
    
    AMBARI-24472. YARN Grafana dashboards for clusters with externalized HDFS 
do not show graphs.
---
 .../stack-hooks/after-INSTALL/scripts/params.py    |  6 +--
 .../after-INSTALL/scripts/shared_initialization.py |  2 +-
 .../stack-hooks/before-START/scripts/params.py     |  2 +
 .../before-START/scripts/shared_initialization.py  | 50 +++++++++++-----------
 .../test/python/stacks/2.0.6/configs/secured.json  |  4 ++
 .../2.0.6/hooks/before-START/test_before_start.py  | 37 ++++++++--------
 6 files changed, 54 insertions(+), 47 deletions(-)

diff --git 
a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py 
b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
index a30949d..3c0efa7 100644
--- 
a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
@@ -97,10 +97,10 @@ mapred_log_dir_prefix = 
default("/configurations/mapred-env/mapred_log_dir_prefi
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 user_group = config['configurations']['cluster-env']['user_group']
 
-namenode_host = default("/clusterHostInfo/namenode_hosts", [])
-has_namenode = not len(namenode_host) == 0
+hdfs_client_hosts = default("/clusterHostInfo/hdfs_client_hosts", [])
+has_hdfs_clients = not len(hdfs_client_hosts) == 0
 
-if has_namenode or dfs_type == 'HCFS':
+if has_hdfs_clients or dfs_type == 'HCFS':
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
   mount_table_xml_inclusion_file_full_path = None
diff --git 
a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
 
b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
index d82f565..0633545 100644
--- 
a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
+++ 
b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
@@ -77,7 +77,7 @@ def setup_config():
   else:
     Logger.warning("Parameter hadoop_conf_dir is missing or directory does not 
exist. This is expected if this host does not have any Hadoop components.")
 
-  if is_hadoop_conf_dir_present and (params.has_namenode or 
stackversion.find('Gluster') >= 0 or params.dfs_type == 'HCFS'):
+  if is_hadoop_conf_dir_present and (params.has_hdfs_clients or 
stackversion.find('Gluster') >= 0 or params.dfs_type == 'HCFS'):
     # create core-site only if the hadoop config diretory exists
     XmlConfig("core-site.xml",
               conf_dir=params.hadoop_conf_dir,
diff --git 
a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py 
b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
index 0f29e24..e3c22ba 100644
--- 
a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
@@ -112,6 +112,7 @@ hbase_master_hosts = 
default("/clusterHostInfo/hbase_master_hosts", [])
 hs_host = default("/clusterHostInfo/historyserver_hosts", [])
 jtnode_host = default("/clusterHostInfo/jtnode_hosts", [])
 namenode_host = default("/clusterHostInfo/namenode_hosts", [])
+hdfs_client_hosts = default("/clusterHostInfo/hdfs_client_hosts", [])
 zk_hosts = default("/clusterHostInfo/zookeeper_server_hosts", [])
 ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_hosts", [])
 cluster_name = config["clusterName"]
@@ -124,6 +125,7 @@ else:
   ams_collector_hosts = 
",".join(default("/clusterHostInfo/metrics_collector_hosts", []))
 
 has_namenode = not len(namenode_host) == 0
+has_hdfs_clients = not len(hdfs_client_hosts) == 0
 has_resourcemanager = not len(rm_host) == 0
 has_slaves = not len(slave_hosts) == 0
 has_oozie_server = not len(oozie_servers) == 0
diff --git 
a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/shared_initialization.py
 
b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/shared_initialization.py
index a0e6e09..f14ba96 100644
--- 
a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/shared_initialization.py
+++ 
b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/shared_initialization.py
@@ -67,16 +67,6 @@ def setup_hadoop():
     else:
       tc_owner = params.hdfs_user
       
-    # if WebHDFS is not enabled we need this jar to create hadoop folders and 
copy tarballs to HDFS.
-    if params.sysprep_skip_copy_fast_jar_hdfs:
-      print "Skipping copying of fast-hdfs-resource.jar as host is sys prepped"
-    elif params.dfs_type == 'HCFS' or not 
WebHDFSUtil.is_webhdfs_available(params.is_webhdfs_enabled, params.dfs_type):
-      # for source-code of jar goto contrib/fast-hdfs-resource
-      File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
-           mode=0644,
-           content=StaticFile("fast-hdfs-resource.jar")
-      )
-      
     if os.path.exists(params.hadoop_conf_dir):
       File(os.path.join(params.hadoop_conf_dir, 'commons-logging.properties'),
            owner=tc_owner,
@@ -104,23 +94,33 @@ def setup_hadoop():
              owner=params.hdfs_user,
         )
 
-      if params.hadoop_metrics2_properties_content:
-        File(os.path.join(params.hadoop_conf_dir, 
"hadoop-metrics2.properties"),
-             owner=params.hdfs_user,
-             group=params.user_group,
-             content=InlineTemplate(params.hadoop_metrics2_properties_content)
-             )
-      else:
-        File(os.path.join(params.hadoop_conf_dir, 
"hadoop-metrics2.properties"),
-             owner=params.hdfs_user,
-             group=params.user_group,
-             content=Template("hadoop-metrics2.properties.j2")
-             )
+    create_microsoft_r_dir()
 
-    if params.dfs_type == 'HCFS' and params.has_core_site and 'ECS_CLIENT' in 
params.component_list:
-       create_dirs()
+  if params.has_hdfs_clients or params.dfs_type == 'HCFS':
+    # if WebHDFS is not enabled we need this jar to create hadoop folders and 
copy tarballs to HDFS.
+    if params.sysprep_skip_copy_fast_jar_hdfs:
+      print "Skipping copying of fast-hdfs-resource.jar as host is sys prepped"
+    elif params.dfs_type == 'HCFS' or not 
WebHDFSUtil.is_webhdfs_available(params.is_webhdfs_enabled, params.dfs_type):
+      # for source-code of jar goto contrib/fast-hdfs-resource
+      File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
+           mode=0644,
+           content=StaticFile("fast-hdfs-resource.jar")
+           )
+    if params.hadoop_metrics2_properties_content:
+      File(os.path.join(params.hadoop_conf_dir, "hadoop-metrics2.properties"),
+           owner=params.hdfs_user,
+           group=params.user_group,
+           content=InlineTemplate(params.hadoop_metrics2_properties_content)
+           )
+    else:
+      File(os.path.join(params.hadoop_conf_dir, "hadoop-metrics2.properties"),
+           owner=params.hdfs_user,
+           group=params.user_group,
+           content=Template("hadoop-metrics2.properties.j2")
+           )
 
-    create_microsoft_r_dir()
+    if params.dfs_type == 'HCFS' and params.has_core_site and 'ECS_CLIENT' in 
params.component_list:
+      create_dirs()
 
 
 def setup_configs():
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json 
b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
index 7b3ab20..2d8bbc6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
@@ -1192,6 +1192,10 @@
         "namenode_hosts": [
             "c6401.ambari.apache.org"
         ],
+        "hdfs_client_hosts": [
+            "c6401.ambari.apache.org",
+            "c6402.ambari.apache.org"
+        ],
         "hbase_master_hosts": [
             "c6402.ambari.apache.org"
         ],
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
 
b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
index 54dd935..b15f862 100644
--- 
a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
+++ 
b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
@@ -64,10 +64,7 @@ class TestHookBeforeStart(RMFTestCase):
                               create_parents = True,
                               cd_access = 'a',
                               )
-    self.assertResourceCalled('File', 
'/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-        mode = 0644,
-    )
+
     self.assertResourceCalled('File', 
'/etc/hadoop/conf/commons-logging.properties',
                               content = 
Template('commons-logging.properties.j2'),
                               owner = 'hdfs',
@@ -83,6 +80,10 @@ class TestHookBeforeStart(RMFTestCase):
                               owner='hdfs',
                               
content=InlineTemplate('log4jproperties\nline2log4jproperties\nline2')
                               )
+    self.assertResourceCalled('File', 
'/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+                              content = StaticFile('fast-hdfs-resource.jar'),
+                              mode = 0644,
+                              )
     self.assertResourceCalled('File', 
'/etc/hadoop/conf/hadoop-metrics2.properties',
                               content = 
InlineTemplate(self.getConfig()['configurations']['hadoop-metrics2.properties']['content']),
                               group='hadoop',
@@ -149,10 +150,6 @@ class TestHookBeforeStart(RMFTestCase):
                               create_parents = True,
                               cd_access = 'a',
                               )
-    self.assertResourceCalled('File', 
'/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-        mode = 0644,
-    )
     self.assertResourceCalled('File', 
'/etc/hadoop/conf/commons-logging.properties',
                               content = 
Template('commons-logging.properties.j2'),
                               owner = 'root',
@@ -168,6 +165,10 @@ class TestHookBeforeStart(RMFTestCase):
                               owner='hdfs',
                               
content=InlineTemplate('log4jproperties\nline2log4jproperties\nline2')
                               )
+    self.assertResourceCalled('File', 
'/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+                              content = StaticFile('fast-hdfs-resource.jar'),
+                              mode = 0644,
+                              )
     self.assertResourceCalled('File', 
'/etc/hadoop/conf/hadoop-metrics2.properties',
                               content = 
InlineTemplate(self.getConfig()['configurations']['hadoop-metrics2.properties']['content']),
                               group='hadoop',
@@ -238,11 +239,7 @@ class TestHookBeforeStart(RMFTestCase):
                               owner = 'hdfs',
                               create_parents = True,
                               cd_access = 'a',
-                              )             
-    self.assertResourceCalled('File', 
'/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-        mode = 0644,
-    )
+                              )
     self.assertResourceCalled('File', 
'/etc/hadoop/conf/commons-logging.properties',
                               content = 
Template('commons-logging.properties.j2'),
                               owner = 'hdfs',
@@ -258,6 +255,10 @@ class TestHookBeforeStart(RMFTestCase):
                               owner='hdfs',
                               
content=InlineTemplate('log4jproperties\nline2log4jproperties\nline2')
     )
+    self.assertResourceCalled('File', 
'/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+                              content = StaticFile('fast-hdfs-resource.jar'),
+                              mode = 0644,
+                              )
     self.assertResourceCalled('File', 
'/etc/hadoop/conf/hadoop-metrics2.properties',
                               content = 
InlineTemplate(self.getConfig()['configurations']['hadoop-metrics2.properties']['content']),
                               group='hadoop',
@@ -330,11 +331,7 @@ class TestHookBeforeStart(RMFTestCase):
                               owner = 'hdfs',
                               create_parents = True,
                               cd_access = 'a',
-                              )                       
-    self.assertResourceCalled('File', 
'/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-        mode = 0644,
-    )
+                              )
     self.assertResourceCalled('File', 
'/etc/hadoop/conf/commons-logging.properties',
                               content = 
Template('commons-logging.properties.j2'),
                               owner = 'hdfs',
@@ -350,6 +347,10 @@ class TestHookBeforeStart(RMFTestCase):
                               owner='hdfs',
                               
content=InlineTemplate('log4jproperties\nline2log4jproperties\nline2')
     )
+    self.assertResourceCalled('File', 
'/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
+                              content = StaticFile('fast-hdfs-resource.jar'),
+                              mode = 0644,
+                              )
     self.assertResourceCalled('File', 
'/etc/hadoop/conf/hadoop-metrics2.properties',
                               content = 
InlineTemplate(self.getConfig()['configurations']['hadoop-metrics2.properties']['content']),
                               group='hadoop',

Reply via email to