Revert "AMBARI-21642. Add hadoop LZO jars if installed in the cluster for DRUID. (Slim Bouguerra via Swapan Shridhar)"
This reverts commit 5442231e4e8e517f1e47f4c177cbe5799e8f96b8. Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b4de6207 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b4de6207 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b4de6207 Branch: refs/heads/branch-feature-logsearch-ui Commit: b4de62079ae9c0533f85b34d7d49589e62119759 Parents: 78778ad Author: Swapan Shridhar <sshrid...@hortonworks.com> Authored: Wed Aug 9 10:32:40 2017 -0700 Committer: Swapan Shridhar <sshrid...@hortonworks.com> Committed: Wed Aug 9 10:32:40 2017 -0700 ---------------------------------------------------------------------- .../DRUID/0.9.2/package/scripts/druid.py | 6 ------ .../DRUID/0.9.2/package/scripts/params.py | 13 ------------- 2 files changed, 19 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/ambari/blob/b4de6207/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py index 4e1ced1..cc02519 100644 --- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py +++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py @@ -113,12 +113,6 @@ def druid(upgrade_type=None, nodeType=None): node_jvm_opts=druid_env_config[format('druid.{node_type_lowercase}.jvm.opts')]) ) Logger.info(format("Created druid-{node_type_lowercase} jvm.config")) - # Handling hadoop Lzo jars if enable and node type is hadoop related eg Overlords and MMs - if params.lzo_enabled and len(params.lzo_packages) > 0 and ( - node_type == 'middleManager' or node_type == 'overlord'): - Logger.info( - format("Copying hadoop lzo jars from {hadoop_lib_home} to {hadoop_dependencies_dir}/hadoop-client/*/"))
- Execute(format('{sudo} cp {hadoop_lib_home}/hadoop-lzo*.jar {hadoop_dependencies_dir}/hadoop-client/*/')) # All druid nodes have dependency on hdfs_client ensure_hadoop_directories() http://git-wip-us.apache.org/repos/asf/ambari/blob/b4de6207/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py index 9e766cd..d47e604 100644 --- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py @@ -18,7 +18,6 @@ limitations under the License. """ from ambari_commons import OSCheck -from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages from resource_management.libraries.functions import conf_select from resource_management.libraries.functions import stack_select from resource_management.libraries.resources.hdfs_resource import HdfsResource @@ -27,7 +26,6 @@ from resource_management.libraries.script.script import Script from resource_management.libraries.functions import format from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources from resource_management.libraries.functions.default import default -from ambari_commons.constants import AMBARI_SUDO_BINARY import status_params @@ -52,14 +50,10 @@ stack_name = default("/hostLevelParams/stack_name", None) # stack version stack_version = default("/commandParams/version", None) -# un-formatted stack version -stack_version_unformatted = str(config['hostLevelParams']['stack_version']) - # default role to coordinator needed for service checks 
component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "DRUID_COORDINATOR") hostname = config['hostname'] -sudo = AMBARI_SUDO_BINARY # default druid parameters druid_home = format("{stack_root}/current/{component_directory}") @@ -236,10 +230,3 @@ if not len(druid_router_hosts) == 0: druid_router_host = druid_router_hosts[0] druid_coordinator_port = config['configurations']['druid-coordinator']['druid.port'] druid_router_port = config['configurations']['druid-router']['druid.port'] - -# Create current Hadoop Clients Libs -stack_version_unformatted = str(config['hostLevelParams']['stack_version']) -io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None) -lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower() -lzo_packages = get_lzo_packages(stack_version_unformatted) -hadoop_lib_home = stack_root + '/' + stack_version + '/hadoop'