[5/7] ambari git commit: AMBARI-22488 RU: Restarting HiveServer2 on RU failed (dgrinenko)
AMBARI-22488 RU: Restarting HiveServer2 on RU failed (dgrinenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a49be4af Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a49be4af Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a49be4af Branch: refs/heads/branch-feature-AMBARI-22457 Commit: a49be4af86484a55816cf8bf3aba06d24e176335 Parents: eecd851 Author: Dmytro Grinenko Authored: Tue Nov 21 16:59:17 2017 +0200 Committer: Dmytro Grinenko Committed: Tue Nov 21 16:59:17 2017 +0200 -- .../libraries/functions/copy_tarball.py | 60 ++-- 1 file changed, 29 insertions(+), 31 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/a49be4af/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py -- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py index b05c97c..d1c295d 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py @@ -190,6 +190,7 @@ def get_sysprep_skip_copy_tarballs_hdfs(): sysprep_skip_copy_tarballs_hdfs = default("/configurations/cluster-env/sysprep_skip_copy_tarballs_hdfs", False) return sysprep_skip_copy_tarballs_hdfs + def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_source_file=None, custom_dest_file=None): """ For a given tarball name, get the source and destination paths to use. @@ -197,50 +198,47 @@ def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_so :param use_upgrading_version_during_upgrade: :param custom_source_file: If specified, use this source path instead of the default one from the map. :param custom_dest_file: If specified, use this destination path instead of the default one from the map. 
- :return: A tuple of (success status, source path, destination path, optional preparation function which is invoked to setup the tarball) + :return: A tuple of success status, source path, destination path, optional preparation function which is invoked to setup the tarball """ stack_name = Script.get_stack_name() - if not stack_name: -Logger.error("Cannot copy {0} tarball to HDFS because stack name could not be determined.".format(str(name))) -return False, None, None + try: +if not stack_name: + raise ValueError("Cannot copy {0} tarball to HDFS because stack name could not be determined.".format(str(name))) - if name is None or name.lower() not in TARBALL_MAP: -Logger.error("Cannot copy tarball to HDFS because {0} is not supported in stack {1} for this operation.".format(str(name), str(stack_name))) -return False, None, None +if name is None or name.lower() not in TARBALL_MAP: + raise ValueError("Cannot copy tarball to HDFS because {0} is not supported in stack {1} for this operation.".format(str(name), str(stack_name))) - service = TARBALL_MAP[name.lower()]['service'] +service = TARBALL_MAP[name.lower()] +service_name = service['service'] +stack_version = get_current_version(service=service_name, use_upgrading_version_during_upgrade=use_upgrading_version_during_upgrade) +stack_root = Script.get_stack_root() - stack_version = get_current_version(service=service, use_upgrading_version_during_upgrade=use_upgrading_version_during_upgrade) - if not stack_version: -Logger.error("Cannot copy {0} tarball to HDFS because stack version could be be determined.".format(str(name))) -return False, None, None +if not stack_version or not stack_root: + raise ValueError("Cannot copy {0} tarball to HDFS because stack version could be be determined.".format(str(name))) - stack_root = Script.get_stack_root() - if not stack_root: -Logger.error("Cannot copy {0} tarball to HDFS because stack root could be be determined.".format(str(name))) -return False, None, None +source_file, 
dest_file = service['dirs'] - (source_file, dest_file) = TARBALL_MAP[name.lower()]['dirs'] +if custom_source_file is not None: + source_file = custom_source_file - if custom_source_file is not None: -source_file = custom_source_file +if custom_dest_file is not None: + dest_file = custom_dest_file - if custom_dest_file is not None: -dest_file = custom_dest_file +source_file = source_file.replace(STACK_NAME_PATTERN, stack_name.lower()) +dest_file = dest_file.replace(STACK_NAME_PATTERN, stack_name.lower()) - source_file
ambari git commit: AMBARI-22488 RU: Restarting HiveServer2 on RU failed (dgrinenko)
Repository: ambari Updated Branches: refs/heads/branch-2.6 eecd8513a -> a49be4af8 AMBARI-22488 RU: Restarting HiveServer2 on RU failed (dgrinenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a49be4af Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a49be4af Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a49be4af Branch: refs/heads/branch-2.6 Commit: a49be4af86484a55816cf8bf3aba06d24e176335 Parents: eecd851 Author: Dmytro Grinenko Authored: Tue Nov 21 16:59:17 2017 +0200 Committer: Dmytro Grinenko Committed: Tue Nov 21 16:59:17 2017 +0200 -- .../libraries/functions/copy_tarball.py | 60 ++-- 1 file changed, 29 insertions(+), 31 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/a49be4af/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py -- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py index b05c97c..d1c295d 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py @@ -190,6 +190,7 @@ def get_sysprep_skip_copy_tarballs_hdfs(): sysprep_skip_copy_tarballs_hdfs = default("/configurations/cluster-env/sysprep_skip_copy_tarballs_hdfs", False) return sysprep_skip_copy_tarballs_hdfs + def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_source_file=None, custom_dest_file=None): """ For a given tarball name, get the source and destination paths to use. @@ -197,50 +198,47 @@ def get_tarball_paths(name, use_upgrading_version_during_upgrade=True, custom_so :param use_upgrading_version_during_upgrade: :param custom_source_file: If specified, use this source path instead of the default one from the map. 
:param custom_dest_file: If specified, use this destination path instead of the default one from the map. - :return: A tuple of (success status, source path, destination path, optional preparation function which is invoked to setup the tarball) + :return: A tuple of success status, source path, destination path, optional preparation function which is invoked to setup the tarball """ stack_name = Script.get_stack_name() - if not stack_name: -Logger.error("Cannot copy {0} tarball to HDFS because stack name could not be determined.".format(str(name))) -return False, None, None + try: +if not stack_name: + raise ValueError("Cannot copy {0} tarball to HDFS because stack name could not be determined.".format(str(name))) - if name is None or name.lower() not in TARBALL_MAP: -Logger.error("Cannot copy tarball to HDFS because {0} is not supported in stack {1} for this operation.".format(str(name), str(stack_name))) -return False, None, None +if name is None or name.lower() not in TARBALL_MAP: + raise ValueError("Cannot copy tarball to HDFS because {0} is not supported in stack {1} for this operation.".format(str(name), str(stack_name))) - service = TARBALL_MAP[name.lower()]['service'] +service = TARBALL_MAP[name.lower()] +service_name = service['service'] +stack_version = get_current_version(service=service_name, use_upgrading_version_during_upgrade=use_upgrading_version_during_upgrade) +stack_root = Script.get_stack_root() - stack_version = get_current_version(service=service, use_upgrading_version_during_upgrade=use_upgrading_version_during_upgrade) - if not stack_version: -Logger.error("Cannot copy {0} tarball to HDFS because stack version could be be determined.".format(str(name))) -return False, None, None +if not stack_version or not stack_root: + raise ValueError("Cannot copy {0} tarball to HDFS because stack version could be be determined.".format(str(name))) - stack_root = Script.get_stack_root() - if not stack_root: -Logger.error("Cannot copy {0} tarball to HDFS 
because stack root could be be determined.".format(str(name))) -return False, None, None +source_file, dest_file = service['dirs'] - (source_file, dest_file) = TARBALL_MAP[name.lower()]['dirs'] +if custom_source_file is not None: + source_file = custom_source_file - if custom_source_file is not None: -source_file = custom_source_file +if custom_dest_file is not None: + dest_file = custom_dest_file - if custom_dest_file is not None: -dest_file = custom_dest_file +source_file = source_file.replace(STACK_NAME_PATTERN, stack_name.lower()) +dest_file =