Repository: ambari
Updated Branches:
  refs/heads/trunk d95e484b1 -> 93011b260


AMBARI-21872. Deploys failing with MR service check failure due to missing mapreduce.tar.gz (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/93011b26
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/93011b26
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/93011b26

Branch: refs/heads/trunk
Commit: 93011b2600d0e5f3d05f1b8d317239c0e34662f5
Parents: d95e484
Author: Andrew Onishuk <aonis...@hortonworks.com>
Authored: Tue Sep 5 18:41:35 2017 +0300
Committer: Andrew Onishuk <aonis...@hortonworks.com>
Committed: Tue Sep 5 18:41:35 2017 +0300

----------------------------------------------------------------------
 .../libraries/functions/copy_tarball.py         | 32 ++++++++++----------
 1 file changed, 16 insertions(+), 16 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/93011b26/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
index 795160c..61b03ca 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py
@@ -63,17 +63,17 @@ TARBALL_MAP = {
              "/{0}/apps/{1}/spark2/spark2-{0}-yarn-archive.tar.gz".format(STACK_NAME_PATTERN, STACK_VERSION_PATTERN))
 }
 
-SERVICE_MAP = {
-  "slider": "SLIDER",
-  "tez": "TEZ_CLIENT",
-  "pig": "PIG",
-  "sqoop": "SQOOP",
-  "hive": "HIVE_CLIENT",
-  "mapreduce": "HDFS_CLIENT",
-  "hadoop_streaming": "MAPREDUCE2_CLIENT",
-  "tez_hive2": "HIVE_CLIENT",
-  "spark": "SPARK_CLIENT",
-  "spark2": "SPARK2_CLIENT"
+SERVICE_TO_CONFIG_MAP = {
+  "slider": "slider-env",
+  "tez": "tez-env",
+  "pig": "pig-env",
+  "sqoop": "sqoop-env",
+  "hive": "hive-env",
+  "mapreduce": "hadoop-env",
+  "hadoop_streaming": "mapred-env",
+  "tez_hive2": "hive-env",
+  "spark": "spark-env",
+  "spark2": "spark2-env"
 }
 
 def get_sysprep_skip_copy_tarballs_hdfs():
@@ -231,11 +231,11 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=Non
     return True
 
   if not skip_component_check:
-    #Use components installed on the node to check if a file can be copied into HDFS
-    local_components = default("/localComponents", [])
-    component = SERVICE_MAP.get(name)
-    if component not in local_components:
-      Logger.info("{0} is not installed on the host. Skip copying {1}".format(component, source_file))
+    # Check if service is installed on the cluster to check if a file can be copied into HDFS
+    config_name = SERVICE_TO_CONFIG_MAP.get(name)
+    config = default("/configurations/"+config_name, None)
+    if config is None:
+      Logger.info("{0} is not present on the cluster. Skip copying {1}".format(config_name, source_file))
       return False
 
   Logger.info("Source file: {0} , Dest file in HDFS: {1}".format(source_file, 
dest_file))
