AMBARI-22383. Remove Auto-Installation of LZO Libraries (ncole)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5adcea78 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5adcea78 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5adcea78 Branch: refs/heads/branch-feature-AMBARI-20859 Commit: 5adcea78792cf227f15e7c08f5bb3fe2c48883d3 Parents: b04e142 Author: Nate Cole <nc...@hortonworks.com> Authored: Wed Nov 8 14:28:59 2017 -0500 Committer: Nate Cole <nc...@hortonworks.com> Committed: Wed Nov 8 14:51:48 2017 -0500 ---------------------------------------------------------------------- .../libraries/functions/__init__.py | 1 - .../libraries/functions/get_lzo_packages.py | 50 -------------------- .../libraries/functions/package_conditions.py | 8 +--- .../DRUID/0.10.1/package/scripts/druid.py | 3 +- .../DRUID/0.10.1/package/scripts/params.py | 3 +- .../common-services/HDFS/2.1.0.2.0/metainfo.xml | 30 ------------ .../HDFS/2.1.0.2.0/package/scripts/datanode.py | 1 - .../HDFS/2.1.0.2.0/package/scripts/hdfs.py | 7 --- .../2.1.0.2.0/package/scripts/hdfs_client.py | 1 - .../2.1.0.2.0/package/scripts/install_params.py | 1 + .../2.1.0.2.0/package/scripts/journalnode.py | 1 - .../HDFS/2.1.0.2.0/package/scripts/namenode.py | 1 - .../common-services/HDFS/3.0.0.3.0/metainfo.xml | 30 ------------ .../HDFS/3.0.0.3.0/package/scripts/hdfs.py | 7 --- .../OOZIE/4.0.0.2.0/package/scripts/oozie.py | 6 +-- .../package/scripts/oozie_server_upgrade.py | 7 ++- .../4.0.0.2.0/package/scripts/params_linux.py | 1 + .../OOZIE/4.2.0.3.0/package/scripts/oozie.py | 4 -- .../package/scripts/oozie_server_upgrade.py | 5 ++ .../TEZ/0.4.0.2.1/package/scripts/tez_client.py | 32 ------------- .../TEZ/0.9.0.3.0/package/scripts/tez_client.py | 26 ---------- .../custom_actions/scripts/remove_bits.py | 2 +- .../BIGTOP/0.8/services/HDFS/metainfo.xml | 16 ------- .../0.8/services/HDFS/package/scripts/params.py | 6 +-- .../stacks/HDP/2.2/services/HDFS/metainfo.xml | 35 -------------- 
.../2.3.GlusterFS/services/HDFS/metainfo.xml | 10 ---- .../stacks/HDP/2.3/services/HDFS/metainfo.xml | 30 ------------ .../stacks/HDP/3.0/services/HDFS/metainfo.xml | 30 ------------ .../python/custom_actions/TestRemoveBits.py | 5 +- 29 files changed, 20 insertions(+), 339 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py index f144b2d..1e388ac 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py @@ -39,7 +39,6 @@ from resource_management.libraries.functions.version import * from resource_management.libraries.functions.format_jvm_option import * from resource_management.libraries.functions.constants import * from resource_management.libraries.functions.get_stack_version import * -from resource_management.libraries.functions.get_lzo_packages import * from resource_management.libraries.functions.setup_ranger_plugin import * from resource_management.libraries.functions.curl_krb_request import * from resource_management.libraries.functions.get_bare_principal import * http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-common/src/main/python/resource_management/libraries/functions/get_lzo_packages.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/get_lzo_packages.py b/ambari-common/src/main/python/resource_management/libraries/functions/get_lzo_packages.py deleted file mode 100644 index cfbb7d8..0000000 
--- a/ambari-common/src/main/python/resource_management/libraries/functions/get_lzo_packages.py +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python -""" -Licensed to the Apache Software Foundation (ASF) under one -or more contributor license agreements. See the NOTICE file -distributed with this work for additional information -regarding copyright ownership. The ASF licenses this file -to you under the Apache License, Version 2.0 (the -"License"); you may not use this file except in compliance -with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - -Ambari Agent - -""" -__all__ = ["get_lzo_packages"] - -from ambari_commons.os_check import OSCheck -from resource_management.libraries.functions.stack_features import check_stack_feature -from resource_management.libraries.functions import StackFeature -from resource_management.libraries.script.script import Script - -# TODO: Make list of lzo packages stack driven -def get_lzo_packages(stack_version_unformatted): - lzo_packages = [] - script_instance = Script.get_instance() - if OSCheck.is_suse_family() and int(OSCheck.get_os_major_version()) >= 12: - lzo_packages += ["liblzo2-2", "hadoop-lzo-native"] - elif OSCheck.is_redhat_family() or OSCheck.is_suse_family(): - lzo_packages += ["lzo", "hadoop-lzo-native"] - elif OSCheck.is_ubuntu_family(): - lzo_packages += ["liblzo2-2"] - - if stack_version_unformatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version_unformatted): - if OSCheck.is_ubuntu_family(): - lzo_packages += [script_instance.format_package_name("hadooplzo-${stack_version}") , - 
script_instance.format_package_name("hadooplzo-${stack_version}-native")] - else: - lzo_packages += [script_instance.format_package_name("hadooplzo_${stack_version}"), - script_instance.format_package_name("hadooplzo_${stack_version}-native")] - else: - lzo_packages += ["hadoop-lzo"] - - return lzo_packages http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py b/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py index 31e78b9..ded63cf 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/package_conditions.py @@ -19,7 +19,7 @@ limitations under the License. 
Ambari Agent """ -__all__ = ["is_lzo_enabled", "should_install_phoenix", "should_install_ams_collector", "should_install_ams_grafana", +__all__ = ["should_install_phoenix", "should_install_ams_collector", "should_install_ams_grafana", "should_install_mysql", "should_install_ranger_tagsync"] import os @@ -44,12 +44,6 @@ def _has_local_components(config, components, indicator_function = any): def _has_applicable_local_component(config, components): return _has_local_components(config, components, any) -def should_install_lzo(): - config = Script.get_config() - io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None) - lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower() - return lzo_enabled - def should_install_phoenix(): phoenix_hosts = default('/clusterHostInfo/phoenix_query_server_hosts', []) phoenix_enabled = default('/configurations/hbase-env/phoenix_sql_enabled', False) http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid.py b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid.py index ec98c3c..bb872b9 100644 --- a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid.py +++ b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid.py @@ -115,8 +115,7 @@ def druid(upgrade_type=None, nodeType=None): ) Logger.info(format("Created druid-{node_type_lowercase} jvm.config")) # Handling hadoop Lzo jars if enable and node type is hadoop related eg Overlords and MMs - if ['middleManager', 'overlord'].__contains__(node_type_lowercase) and params.lzo_enabled and len( - params.lzo_packages) > 0: + if ['middleManager', 
'overlord'].__contains__(node_type_lowercase) and params.lzo_enabled: try: Logger.info( format( http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py index fd1cde6..141250d 100644 --- a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py @@ -18,7 +18,6 @@ limitations under the License. """ from ambari_commons import OSCheck -from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages from resource_management.libraries.functions import conf_select from resource_management.libraries.functions import stack_select from resource_management.libraries.resources.hdfs_resource import HdfsResource @@ -196,5 +195,5 @@ if has_metric_collector: stack_version_unformatted = str(config['hostLevelParams']['stack_version']) io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None) lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower() -lzo_packages = get_lzo_packages(stack_version_unformatted) + hadoop_lib_home = stack_root + '/' + stack_version + '/hadoop/lib' http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml index 9979de4..6bbb583 100644 --- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml @@ -244,11 +244,6 @@ <package> <name>hadoop</name> </package> - <package> - <name>hadoop-lzo</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> </packages> </osSpecific> @@ -265,16 +260,6 @@ <name>snappy-devel</name> </package> <package> - <name>lzo</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadoop-lzo-native</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> <name>hadoop-libhdfs</name> </package> <package> @@ -296,16 +281,6 @@ <name>snappy-devel</name> </package> <package> - <name>liblzo2-2</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadoop-lzo-native</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> <name>hadoop-libhdfs</name> </package> </packages> @@ -324,11 +299,6 @@ <name>libsnappy-dev</name> </package> <package> - <name>liblzo2-2</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> <name>hadoop-hdfs</name> </package> <package> http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py index c0abb15..a8b0f48 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py +++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py @@ -165,7 +165,6 @@ class DataNodeDefault(DataNode): @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY) class DataNodeWindows(DataNode): def install(self, env): - import install_params self.install_packages(env) if __name__ == "__main__": http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py index 4022986..1d7fe53 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py @@ -26,7 +26,6 @@ from resource_management.core.source import Template from resource_management.core.resources.service import ServiceConfig from resource_management.libraries.resources.xml_config import XmlConfig -from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages from resource_management.core.exceptions import Fail from resource_management.core.logger import Logger from resource_management.libraries.functions.format import format @@ -143,12 +142,6 @@ def hdfs(name=None): content=Template("slaves.j2") ) - if params.lzo_enabled: - lzo_packages = get_lzo_packages(params.stack_version_unformatted) - Package(lzo_packages, - retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability, - retry_count=params.agent_stack_retry_count) - def install_snappy(): import params Directory([params.so_target_dir_x86, params.so_target_dir_x64], http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py 
---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py index f2e96c3..a802e08 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py @@ -70,7 +70,6 @@ class HdfsClientDefault(HdfsClient): @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY) class HdfsClientWindows(HdfsClient): def install(self, env): - import install_params self.install_packages(env) self.configure(env) http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py index 235f231..72850b3 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py @@ -18,6 +18,7 @@ limitations under the License. 
""" from ambari_commons import OSCheck +exclude_packages = [] # These parameters are supposed to be referenced at installation time, before the Hadoop environment variables have been set if OSCheck.is_windows_family(): exclude_packages = [] http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py index 75b2eeb..eaa21e9 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py @@ -116,7 +116,6 @@ class JournalNodeDefault(JournalNode): @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY) class JournalNodeWindows(JournalNode): def install(self, env): - import install_params self.install_packages(env) def start(self, env): http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py index 291da05..5f4152d 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py @@ -327,7 +327,6 @@ class NameNodeDefault(NameNode): @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY) class NameNodeWindows(NameNode): def install(self, env): - import install_params 
self.install_packages(env) #TODO we need this for HA because of manual steps self.configure(env) http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/metainfo.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/metainfo.xml index e6d1166..0c629f3 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/metainfo.xml +++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/metainfo.xml @@ -270,11 +270,6 @@ <package> <name>hadoop</name> </package> - <package> - <name>hadoop-lzo</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> </packages> </osSpecific> @@ -291,16 +286,6 @@ <name>snappy-devel</name> </package> <package> - <name>lzo</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadoop-lzo-native</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> <name>hadoop-libhdfs</name> </package> </packages> @@ -319,16 +304,6 @@ <name>snappy-devel</name> </package> <package> - <name>liblzo2-2</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadoop-lzo-native</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> <name>hadoop-libhdfs</name> </package> </packages> @@ -347,11 +322,6 @@ <name>libsnappy-dev</name> </package> <package> - <name>liblzo2-2</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> <name>hadoop-hdfs</name> </package> <package> 
http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py index 4022986..89f9a1c 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py +++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py @@ -26,7 +26,6 @@ from resource_management.core.source import Template from resource_management.core.resources.service import ServiceConfig from resource_management.libraries.resources.xml_config import XmlConfig -from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages from resource_management.core.exceptions import Fail from resource_management.core.logger import Logger from resource_management.libraries.functions.format import format @@ -142,12 +141,6 @@ def hdfs(name=None): owner=tc_owner, content=Template("slaves.j2") ) - - if params.lzo_enabled: - lzo_packages = get_lzo_packages(params.stack_version_unformatted) - Package(lzo_packages, - retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability, - retry_count=params.agent_stack_retry_count) def install_snappy(): import params http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py index f215a1e..cd94244 100644 --- 
a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py +++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py @@ -37,7 +37,6 @@ from resource_management.libraries.functions.copy_tarball import get_current_ver from resource_management.libraries.resources.xml_config import XmlConfig from resource_management.libraries.script.script import Script from resource_management.libraries.functions.security_commons import update_credential_provider_path -from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages from resource_management.core.resources.packaging import Package from resource_management.core.shell import as_user, as_sudo, call, checked_call from resource_management.core.exceptions import Fail @@ -306,11 +305,8 @@ def oozie_server_specific(upgrade_type): Execute(format('{sudo} chown {oozie_user}:{user_group} {oozie_libext_dir}/falcon-oozie-el-extension-*.jar'), not_if = no_op_test) + # just copying files is ok - we're not making assumptions about installing LZO here if params.lzo_enabled: - all_lzo_packages = get_lzo_packages(params.stack_version_unformatted) - Package(all_lzo_packages, - retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability, - retry_count=params.agent_stack_retry_count) Execute(format('{sudo} cp {hadoop_lib_home}/hadoop-lzo*.jar {oozie_lib_dir}'), not_if = no_op_test, ) http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py index 23b39ef..c9c07d1 100644 --- 
a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py +++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py @@ -61,10 +61,15 @@ class OozieUpgrade(Script): # <stack-selector-tool> set hadoop-client has not run yet, therefore we cannot use # <stack-root>/current/hadoop-client ; we must use params.version directly # however, this only works when upgrading beyond 2.2.0.0; don't do this - # for downgrade to 2.2.0.0 since hadoop-lzo will not be present + # for downgrade to 2.2.0.0 since hadoop-lzo will not be present. + # # This can also be called during a Downgrade. + # # When a version is Installed, it is responsible for downloading the hadoop-lzo packages # if lzo is enabled. + # + # This block is just copying around files - there is no assumption about installation + # if params.lzo_enabled and (params.upgrade_direction == Direction.UPGRADE or target_version_needs_compression_libraries): hadoop_lzo_pattern = 'hadoop-lzo*.jar' hadoop_client_new_lib_dir = format("{stack_root}/{version}/hadoop/lib") http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py index a0f0672..40901b6 100644 --- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py +++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py @@ -387,3 +387,4 @@ is_webhdfs_enabled = config['configurations']['hdfs-site']['dfs.webhdfs.enabled' # The logic for LZO also exists in HDFS' params.py io_compression_codecs = 
default("/configurations/core-site/io.compression.codecs", None) lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower() + http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/package/scripts/oozie.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/package/scripts/oozie.py index 0771e93..19912fd 100644 --- a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/package/scripts/oozie.py +++ b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/package/scripts/oozie.py @@ -276,10 +276,6 @@ def oozie_server_specific(): not_if = no_op_test) if params.lzo_enabled: - all_lzo_packages = get_lzo_packages(params.stack_version_unformatted) - Package(all_lzo_packages, - retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability, - retry_count=params.agent_stack_retry_count) Execute(format('{sudo} cp {hadoop_lib_home}/hadoop-lzo*.jar {oozie_lib_dir}'), not_if = no_op_test, ) http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/package/scripts/oozie_server_upgrade.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/package/scripts/oozie_server_upgrade.py b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/package/scripts/oozie_server_upgrade.py index 402c7cb..4fc1a42 100644 --- a/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/package/scripts/oozie_server_upgrade.py +++ b/ambari-server/src/main/resources/common-services/OOZIE/4.2.0.3.0/package/scripts/oozie_server_upgrade.py @@ -62,9 +62,14 @@ class OozieUpgrade(Script): # 
<stack-root>/current/hadoop-client ; we must use params.version directly # however, this only works when upgrading beyond 2.2.0.0; don't do this # for downgrade to 2.2.0.0 since hadoop-lzo will not be present + # # This can also be called during a Downgrade. + # # When a version is Installed, it is responsible for downloading the hadoop-lzo packages # if lzo is enabled. + # + # This block is just copying around files - there is no assumption about installation + # if params.lzo_enabled and (params.upgrade_direction == Direction.UPGRADE or target_version_needs_compression_libraries): hadoop_lzo_pattern = 'hadoop-lzo*.jar' hadoop_client_new_lib_dir = format("{stack_root}/{version}/hadoop/lib") http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py index ed3f5fd..dcf3e24 100644 --- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py +++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/tez_client.py @@ -19,22 +19,15 @@ Ambari Agent """ import os -import urlparse from ambari_commons import OSConst -from ambari_commons.inet_utils import download_file from ambari_commons.os_family_impl import OsFamilyImpl -from ambari_commons.os_utils import copy_file, extract_path_component from resource_management.core.exceptions import ClientComponentHasNoStatus -from resource_management.core.source import InlineTemplate from resource_management.libraries.functions import stack_select from resource_management.libraries.functions import StackFeature from resource_management.libraries.functions.stack_features import check_stack_feature -from 
resource_management.libraries.functions.get_stack_version import get_stack_version from resource_management.libraries.script.script import Script -from resource_management.libraries.functions.default import default -from resource_management.core.logger import Logger from tez import tez @@ -96,33 +89,8 @@ class TezClientWindows(TezClient): self.install_packages(env) params.refresh_tez_state_dependent_params() env.set_params(params) - self._install_lzo_support_if_needed(params) self.configure(env, config_dir=params.tez_conf_dir) - def _install_lzo_support_if_needed(self, params): - hadoop_classpath_prefix = self._expand_hadoop_classpath_prefix(params.hadoop_classpath_prefix_template, params.config['configurations']['tez-site']) - - hadoop_lzo_dest_path = extract_path_component(hadoop_classpath_prefix, "hadoop-lzo-") - if hadoop_lzo_dest_path: - hadoop_lzo_file = os.path.split(hadoop_lzo_dest_path)[1] - - config = Script.get_config() - file_url = urlparse.urljoin(config['hostLevelParams']['jdk_location'], hadoop_lzo_file) - hadoop_lzo_dl_path = os.path.join(config["hostLevelParams"]["agentCacheDir"], hadoop_lzo_file) - download_file(file_url, hadoop_lzo_dl_path) - #This is for protection against configuration changes. It will infect every new destination with the lzo jar, - # but since the classpath points to the jar directly we're getting away with it. 
- if not os.path.exists(hadoop_lzo_dest_path): - copy_file(hadoop_lzo_dl_path, hadoop_lzo_dest_path) - - def _expand_hadoop_classpath_prefix(self, hadoop_classpath_prefix_template, configurations): - import resource_management - - hadoop_classpath_prefix_obj = InlineTemplate(hadoop_classpath_prefix_template, configurations_dict=configurations, - extra_imports=[resource_management, resource_management.core, - resource_management.core.source]) - hadoop_classpath_prefix = hadoop_classpath_prefix_obj.get_content() - return hadoop_classpath_prefix if __name__ == "__main__": TezClient().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/tez_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/tez_client.py b/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/tez_client.py index b42d14e..0dfab4f 100644 --- a/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/tez_client.py +++ b/ambari-server/src/main/resources/common-services/TEZ/0.9.0.3.0/package/scripts/tez_client.py @@ -98,33 +98,7 @@ class TezClientWindows(TezClient): self.install_packages(env) params.refresh_tez_state_dependent_params() env.set_params(params) - self._install_lzo_support_if_needed(params) self.configure(env, config_dir=params.tez_conf_dir) - def _install_lzo_support_if_needed(self, params): - hadoop_classpath_prefix = self._expand_hadoop_classpath_prefix(params.hadoop_classpath_prefix_template, params.config['configurations']['tez-site']) - - hadoop_lzo_dest_path = extract_path_component(hadoop_classpath_prefix, "hadoop-lzo-") - if hadoop_lzo_dest_path: - hadoop_lzo_file = os.path.split(hadoop_lzo_dest_path)[1] - - config = Script.get_config() - file_url = urlparse.urljoin(config['hostLevelParams']['jdk_location'], hadoop_lzo_file) 
- hadoop_lzo_dl_path = os.path.join(config["hostLevelParams"]["agentCacheDir"], hadoop_lzo_file) - download_file(file_url, hadoop_lzo_dl_path) - #This is for protection against configuration changes. It will infect every new destination with the lzo jar, - # but since the classpath points to the jar directly we're getting away with it. - if not os.path.exists(hadoop_lzo_dest_path): - copy_file(hadoop_lzo_dl_path, hadoop_lzo_dest_path) - - def _expand_hadoop_classpath_prefix(self, hadoop_classpath_prefix_template, configurations): - import resource_management - - hadoop_classpath_prefix_obj = InlineTemplate(hadoop_classpath_prefix_template, configurations_dict=configurations, - extra_imports=[resource_management, resource_management.core, - resource_management.core.source]) - hadoop_classpath_prefix = hadoop_classpath_prefix_obj.get_content() - return hadoop_classpath_prefix - if __name__ == "__main__": TezClient().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/custom_actions/scripts/remove_bits.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/custom_actions/scripts/remove_bits.py b/ambari-server/src/main/resources/custom_actions/scripts/remove_bits.py index 0be9a34..078cf32 100644 --- a/ambari-server/src/main/resources/custom_actions/scripts/remove_bits.py +++ b/ambari-server/src/main/resources/custom_actions/scripts/remove_bits.py @@ -43,7 +43,7 @@ class RemoveBits(Script): Logger.info("Attempting to remove bits for HDP 2.1") config = Script.get_config() - packages_to_remove = ["zookeeper", "hadoop", "hadoop-lzo", "hadoop-hdfs", "hadoop-libhdfs", "hadoop-yarn", "hadoop-client", "hadoop-mapreduce", "hive", "hive-hcatalog", "hive-jdbc", "hive-webhcat", "hcatalog", "webhcat-tar-hive", "webhcat-tar-pig", "oozie", "oozie-client", "pig", "sqoop", "tez" "falcon", "storm", "flume", "hbase", "phoenix"] + packages_to_remove = ["zookeeper", "hadoop", 
"hadoop-hdfs", "hadoop-libhdfs", "hadoop-yarn", "hadoop-client", "hadoop-mapreduce", "hive", "hive-hcatalog", "hive-jdbc", "hive-webhcat", "hcatalog", "webhcat-tar-hive", "webhcat-tar-pig", "oozie", "oozie-client", "pig", "sqoop", "tez", "falcon", "storm", "flume", "hbase", "phoenix"] packages_to_remove.reverse() Logger.info("Packages to remove: {0}".format(" ".join(packages_to_remove))) http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/metainfo.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/metainfo.xml index 8500839..f473598 100644 --- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/metainfo.xml +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/metainfo.xml @@ -180,10 +180,6 @@ <package> <name>hadoop</name> </package> - <package> - <name>hadoop-lzo</name> - <skipUpgrade>true</skipUpgrade> - </package> </packages> </osSpecific> @@ -197,14 +193,6 @@ <package> <name>snappy-devel</name> </package> <package> - <name>lzo</name> - <skipUpgrade>true</skipUpgrade> - </package> - <package> - <name>hadoop-lzo-native</name> - <skipUpgrade>true</skipUpgrade> - </package> - <package> <name>hadoop-libhdfs</name> </package> </packages> @@ -220,10 +208,6 @@ <name>libsnappy-dev</name> </package> <package> - <name>liblzo2-2</name> - <skipUpgrade>true</skipUpgrade> - </package> - <package> <name>hadoop-hdfs</name> </package> <package> http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/params.py
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/params.py index 3a711c7..f16242c 100644 --- a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/params.py +++ b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/params.py @@ -209,11 +209,7 @@ HdfsDirectory = functools.partial( bin_dir = hadoop_bin_dir ) -io_compression_codecs = config['configurations']['core-site']['io.compression.codecs'] -if not "com.hadoop.compression.lzo" in io_compression_codecs: - exclude_packages = ["lzo", "hadoop-lzo", "hadoop-lzo-native", "liblzo2-2"] -else: - exclude_packages = [] +exclude_packages = [] name_node_params = default("/commandParams/namenode", None) #hadoop params http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml index 8c2ec8b..d7221b9 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HDFS/metainfo.xml @@ -40,19 +40,6 @@ <name>snappy-devel</name> </package> <package> - <name>lzo</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadooplzo_${stack_version}</name> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadooplzo_${stack_version}-native</name> - <condition>should_install_lzo</condition> - </package> - <package> <name>hadoop_${stack_version}-libhdfs</name> </package> <package> @@ -77,19 +64,6 @@ <name>snappy-devel</name> </package> <package> - <name>liblzo2-2</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> - 
<name>hadooplzo_${stack_version}</name> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadooplzo_${stack_version}-native</name> - <condition>should_install_lzo</condition> - </package> - <package> <name>hadoop_${stack_version}-libhdfs</name> </package> </packages> @@ -123,15 +97,6 @@ <name>libsnappy-dev</name> </package> <package> - <name>hadooplzo-${stack_version}</name> - <condition>should_install_lzo</condition> - </package> - <package> - <name>liblzo2-2</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> <name>libhdfs0-${stack_version}</name> </package> </packages> http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HDFS/metainfo.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HDFS/metainfo.xml index 15fe931..6a9fab61 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HDFS/metainfo.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HDFS/metainfo.xml @@ -36,13 +36,6 @@ <name>snappy-devel</name> </package> <package> - <name>lzo</name> - <skipUpgrade>true</skipUpgrade> - </package> - <package> - <name>hadooplzo_2_3_*</name> - </package> - <package> <name>hadoop_2_3_*-libhdfs</name> </package> </packages> @@ -76,9 +69,6 @@ <name>libsnappy-dev</name> </package> <package> - <name>hadooplzo-2-3-.*</name> - </package> - <package> <name>libhdfs0-2-3-.*</name> </package> </packages> http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml ---------------------------------------------------------------------- diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml index ccf9a4e..86531cc 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/metainfo.xml @@ -72,19 +72,6 @@ <name>snappy-devel</name> </package> <package> - <name>lzo</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadooplzo_${stack_version}</name> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadooplzo_${stack_version}-native</name> - <condition>should_install_lzo</condition> - </package> - <package> <name>hadoop_${stack_version}-libhdfs</name> </package> <package> @@ -109,19 +96,6 @@ <name>snappy-devel</name> </package> <package> - <name>liblzo2-2</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadooplzo_${stack_version}</name> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadooplzo_${stack_version}-native</name> - <condition>should_install_lzo</condition> - </package> - <package> <name>hadoop_${stack_version}-libhdfs</name> </package> </packages> @@ -155,10 +129,6 @@ <name>libsnappy-dev</name> </package> <package> - <name>hadooplzo-${stack_version}</name> - <condition>should_install_lzo</condition> - </package> - <package> <name>libhdfs0-${stack_version}</name> </package> </packages> http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/metainfo.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/metainfo.xml index 95a5f84..775508e 100644 --- 
a/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/metainfo.xml +++ b/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/metainfo.xml @@ -50,19 +50,6 @@ <name>snappy-devel</name> </package> <package> - <name>lzo</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadooplzo_${stack_version}</name> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadooplzo_${stack_version}-native</name> - <condition>should_install_lzo</condition> - </package> - <package> <name>hadoop_${stack_version}-libhdfs</name> </package> </packages> @@ -84,19 +71,6 @@ <name>snappy-devel</name> </package> <package> - <name>liblzo2-2</name> - <skipUpgrade>true</skipUpgrade> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadooplzo_${stack_version}</name> - <condition>should_install_lzo</condition> - </package> - <package> - <name>hadooplzo_${stack_version}-native</name> - <condition>should_install_lzo</condition> - </package> - <package> <name>hadoop_${stack_version}-libhdfs</name> </package> </packages> @@ -130,10 +104,6 @@ <name>libsnappy-dev</name> </package> <package> - <name>hadooplzo-${stack_version}</name> - <condition>should_install_lzo</condition> - </package> - <package> <name>libhdfs0-${stack_version}</name> </package> </packages> http://git-wip-us.apache.org/repos/asf/ambari/blob/5adcea78/ambari-server/src/test/python/custom_actions/TestRemoveBits.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/python/custom_actions/TestRemoveBits.py b/ambari-server/src/test/python/custom_actions/TestRemoveBits.py index 6537152..8217ca0 100644 --- a/ambari-server/src/test/python/custom_actions/TestRemoveBits.py +++ b/ambari-server/src/test/python/custom_actions/TestRemoveBits.py @@ -105,13 +105,10 @@ class TestRemoveBits(RMFTestCase): self.assertResourceCalled('Package', 'hadoop-hdfs', action = 
['remove'], ) - self.assertResourceCalled('Package', 'hadoop-lzo', - action = ['remove'], - ) self.assertResourceCalled('Package', 'hadoop', action = ['remove'], ) self.assertResourceCalled('Package', 'zookeeper', action = ['remove'], ) - self.assertNoMoreResources() \ No newline at end of file + self.assertNoMoreResources()