AMBARI-21642. Add hadoop LZO jars if installed in the cluster for DRUID. (Slim Bouguerra via Swapan Shridhar)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5442231e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5442231e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5442231e

Branch: refs/heads/branch-feature-AMBARI-14714
Commit: 5442231e4e8e517f1e47f4c177cbe5799e8f96b8
Parents: ea2c432
Author: Swapan Shridhar <sshrid...@hortonworks.com>
Authored: Tue Aug 8 16:14:31 2017 -0700
Committer: Swapan Shridhar <sshrid...@hortonworks.com>
Committed: Tue Aug 8 16:14:31 2017 -0700

----------------------------------------------------------------------
 .../DRUID/0.9.2/package/scripts/druid.py               |  6 ++++++
 .../DRUID/0.9.2/package/scripts/params.py              | 13 +++++++++++++
 2 files changed, 19 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5442231e/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py
index cc02519..4e1ced1 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid.py
@@ -113,6 +113,12 @@ def druid(upgrade_type=None, nodeType=None):
            node_jvm_opts=druid_env_config[format('druid.{node_type_lowercase}.jvm.opts')])
          )
     Logger.info(format("Created druid-{node_type_lowercase} jvm.config"))
+    # Handle hadoop LZO jars if enabled and the node type is hadoop-related, e.g. Overlords and MiddleManagers
+    if params.lzo_enabled and len(params.lzo_packages) > 0 and (
+            node_type == 'middleManager' or node_type == 'overlord'):
+        Logger.info(
+            format("Copying hadoop lzo jars from {hadoop_lib_home} to {hadoop_dependencies_dir}/hadoop-client/*/"))
+        Execute(format('{sudo} cp {hadoop_lib_home}/hadoop-lzo*.jar {hadoop_dependencies_dir}/hadoop-client/*/'))
 
   # All druid nodes have dependency on hdfs_client
   ensure_hadoop_directories()
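
For readers following the druid.py change without an Ambari install: when LZO is enabled (and the stack provides LZO packages), the new block copies the hadoop-lzo jars into every versioned hadoop-client directory under Druid's hadoop-dependencies dir, but only on the roles that launch Hadoop tasks (Overlord, MiddleManager). Below is a minimal standalone sketch in plain Python, without Ambari's Execute/format wrappers; all paths and placeholder values are hypothetical, not taken from the commit.

import glob
import os
import shutil

# Hypothetical stand-ins for the values that params.py and the role mapping provide.
node_type = 'middleManager'   # 'overlord' also triggers the copy
lzo_enabled = True            # assumed: core-site lists com.hadoop.compression.lzo
lzo_packages = ['hadooplzo']  # assumed non-empty for stacks that ship LZO
hadoop_lib_home = '/usr/hdp/2.6.2.0-0000/hadoop/lib'                             # hypothetical path
hadoop_dependencies_dir = '/usr/hdp/current/druid-overlord/hadoop-dependencies'  # hypothetical path

if lzo_enabled and len(lzo_packages) > 0 and node_type in ('middleManager', 'overlord'):
    # Equivalent of: cp {hadoop_lib_home}/hadoop-lzo*.jar {hadoop_dependencies_dir}/hadoop-client/*/
    for client_dir in glob.glob(os.path.join(hadoop_dependencies_dir, 'hadoop-client', '*')):
        if not os.path.isdir(client_dir):
            continue
        for jar in glob.glob(os.path.join(hadoop_lib_home, 'hadoop-lzo*.jar')):
            shutil.copy(jar, client_dir)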

http://git-wip-us.apache.org/repos/asf/ambari/blob/5442231e/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
index d47e604..9e766cd 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/params.py
@@ -18,6 +18,7 @@ limitations under the License.
 
 """
 from ambari_commons import OSCheck
+from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
@@ -26,6 +27,7 @@ from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
 from resource_management.libraries.functions.default import default
+from ambari_commons.constants import AMBARI_SUDO_BINARY
 
 import status_params
 
@@ -50,10 +52,14 @@ stack_name = default("/hostLevelParams/stack_name", None)
 # stack version
 stack_version = default("/commandParams/version", None)
 
+# un-formatted stack version
+stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+
 # default role to coordinator needed for service checks
 component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "DRUID_COORDINATOR")
 
 hostname = config['hostname']
+sudo = AMBARI_SUDO_BINARY
 
 # default druid parameters
 druid_home = format("{stack_root}/current/{component_directory}")
@@ -230,3 +236,10 @@ if not len(druid_router_hosts) == 0:
   druid_router_host = druid_router_hosts[0]
 druid_coordinator_port = config['configurations']['druid-coordinator']['druid.port']
 druid_router_port = config['configurations']['druid-router']['druid.port']
+
+# LZO support: detect whether the LZO codec is configured and locate the current Hadoop client libs
+stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
+lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()
+lzo_packages = get_lzo_packages(stack_version_unformatted)
+hadoop_lib_home = stack_root + '/' + stack_version + '/hadoop'
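
The params.py additions reduce to a null-safe substring check on core-site's io.compression.codecs, plus resolving the stack's LZO packages and hadoop lib path. A self-contained sketch of just the detection part, assuming a plain dict stands in for Ambari's command JSON; the is_lzo_enabled helper is illustrative only and not part of the commit.

def is_lzo_enabled(core_site):
    # Mirrors the lzo_enabled expression: None-safe, case-insensitive codec match.
    codecs = core_site.get('io.compression.codecs')
    return codecs is not None and 'com.hadoop.compression.lzo' in codecs.lower()

# Hypothetical core-site configuration for a cluster with the LZO codec installed.
core_site = {
    'io.compression.codecs': 'org.apache.hadoop.io.compress.GzipCodec,'
                             'com.hadoop.compression.lzo.LzoCodec'
}
print(is_lzo_enabled(core_site))  # True: the LZO codec is configured
print(is_lzo_enabled({}))         # False: no codec list at all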
