http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index ece6ef6..bc36e35 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -97,11 +97,30 @@ CREATE TABLE configuration_base (
   CONSTRAINT PK_configuration_base PRIMARY KEY (id)
 );
 
+CREATE TABLE clusterconfig (
+  config_id NUMBER(19) NOT NULL,
+  version_tag VARCHAR2(255) NOT NULL,
+  version NUMBER(19) NOT NULL,
+  type_name VARCHAR2(255) NOT NULL,
+  cluster_id NUMBER(19) NOT NULL,
+  stack_id NUMBER(19) NOT NULL,
+  selected NUMBER(1) DEFAULT 0 NOT NULL,
+  config_data CLOB NOT NULL,
+  config_attributes CLOB,
+  create_timestamp NUMBER(19) NOT NULL,
+  unmapped SMALLINT DEFAULT 0 NOT NULL,
+  selected_timestamp NUMBER(19) DEFAULT 0 NOT NULL,
+  CONSTRAINT PK_clusterconfig PRIMARY KEY (config_id),
+  CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id),
+  CONSTRAINT FK_clusterconfig_stack_id FOREIGN KEY (stack_id) REFERENCES stack(stack_id),
+  CONSTRAINT UQ_config_type_tag UNIQUE (cluster_id, type_name, version_tag),
+  CONSTRAINT UQ_config_type_version UNIQUE (cluster_id, type_name, version));
+
 CREATE TABLE ambari_configuration (
-  id NUMBER(19) NOT NULL,
-  CONSTRAINT PK_ambari_configuration PRIMARY KEY (id),
-  CONSTRAINT FK_ambari_conf_conf_base FOREIGN KEY (id) REFERENCES configuration_base (id)
-);
+  category_name VARCHAR2(100) NOT NULL,
+  property_name VARCHAR2(100) NOT NULL,
+  property_value VARCHAR2(255) NOT NULL,
+  CONSTRAINT PK_ambari_configuration PRIMARY KEY (category_name, property_name));
 
 CREATE TABLE hosts (
   host_id NUMBER(19) NOT NULL,
@@ -1014,12 +1033,21 @@ CREATE TABLE kerberos_principal (
   CONSTRAINT PK_kerberos_principal PRIMARY KEY (principal_name)
 );
 
+CREATE TABLE kerberos_keytab (
+  keytab_path VARCHAR2(255) NOT NULL,
+  CONSTRAINT PK_krb_keytab_path_host_id PRIMARY KEY (keytab_path)
+);
+
 CREATE TABLE kerberos_principal_host (
   principal_name VARCHAR2(255) NOT NULL,
+  keytab_path VARCHAR2(255) NOT NULL,
+  is_distributed NUMBER(1) DEFAULT 0 NOT NULL,
   host_id NUMBER(19) NOT NULL,
-  CONSTRAINT PK_kerberos_principal_host PRIMARY KEY (principal_name, host_id),
+  CONSTRAINT PK_kerberos_principal_host PRIMARY KEY (principal_name, keytab_path, host_id),
  CONSTRAINT FK_krb_pr_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id),
-  CONSTRAINT FK_krb_pr_host_principalname FOREIGN KEY (principal_name) REFERENCES kerberos_principal (principal_name));
+  CONSTRAINT FK_krb_pr_host_principalname FOREIGN KEY (principal_name) REFERENCES kerberos_principal (principal_name),
+  CONSTRAINT FK_krb_pr_host_keytab_path FOREIGN KEY (keytab_path) REFERENCES kerberos_keytab (keytab_path)
+);
 
 CREATE TABLE kerberos_descriptor
 (
@@ -1209,7 +1237,6 @@ INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('ambari_oper
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('remote_cluster_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('remote_cluster_service_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponent_version_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('configuration_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('hostcomponentdesiredstate_id_seq', 0);
 
 INSERT INTO metainfo("metainfo_key", "metainfo_value") values ('version', '${ambariSchemaVersion}');
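
The refactored ambari_configuration table stores one property per row under a composite key (category_name, property_name), replacing the id-based join against the dropped configuration_base table. A minimal sqlite3 sketch of reads and upserts against this layout (illustrative only; the category and property names below are made up, and Ambari itself reaches these tables through JPA entities, not raw SQL):

import sqlite3

# Stand-in for the refactored table: one property per row,
# composite primary key (category_name, property_name).
conn = sqlite3.connect(":memory:")
conn.execute("""
    CREATE TABLE ambari_configuration (
      category_name  VARCHAR(100) NOT NULL,
      property_name  VARCHAR(100) NOT NULL,
      property_value VARCHAR(255) NOT NULL,
      PRIMARY KEY (category_name, property_name))""")

def set_property(category, name, value):
    # Upsert keyed by the composite PK (INSERT OR REPLACE is sqlite-specific).
    conn.execute("INSERT OR REPLACE INTO ambari_configuration VALUES (?, ?, ?)",
                 (category, name, value))

def get_category(category):
    # Fetch every property of one category as a dict.
    rows = conn.execute("SELECT property_name, property_value "
                        "FROM ambari_configuration WHERE category_name = ?",
                        (category,))
    return dict(rows.fetchall())

set_property("ldap-configuration", "ambari.ldap.authentication.enabled", "true")
print(get_category("ldap-configuration"))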

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 14e8e6e..227645f 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -87,21 +87,11 @@ CREATE TABLE clusters (
  CONSTRAINT FK_clusters_resource_id FOREIGN KEY (resource_id) REFERENCES adminresource (resource_id)
 );
 
-CREATE TABLE configuration_base (
-  id BIGINT NOT NULL,
-  version_tag VARCHAR(255) NOT NULL,
-  version BIGINT NOT NULL,
-  type VARCHAR(255) NOT NULL,
-  data TEXT NOT NULL,
-  attributes TEXT,
-  create_timestamp BIGINT NOT NULL,
-  CONSTRAINT PK_configuration_base PRIMARY KEY (id)
-);
-
 CREATE TABLE ambari_configuration (
-  id BIGINT NOT NULL,
-  CONSTRAINT PK_ambari_configuration PRIMARY KEY (id),
-  CONSTRAINT FK_ambari_conf_conf_base FOREIGN KEY (id) REFERENCES configuration_base (id)
+  category_name VARCHAR(100) NOT NULL,
+  property_name VARCHAR(100) NOT NULL,
+  property_value VARCHAR(255) NOT NULL,
+  CONSTRAINT PK_ambari_configuration PRIMARY KEY (category_name, property_name)
 );
 
 CREATE TABLE hosts (
@@ -1019,12 +1009,19 @@ CREATE TABLE kerberos_principal (
   cached_keytab_path VARCHAR(255),
   CONSTRAINT PK_kerberos_principal PRIMARY KEY (principal_name));
 
+CREATE TABLE kerberos_keytab (
+  keytab_path VARCHAR(255) NOT NULL,
+  CONSTRAINT PK_krb_keytab_path_host_id PRIMARY KEY (keytab_path));
+
 CREATE TABLE kerberos_principal_host (
   principal_name VARCHAR(255) NOT NULL,
+  keytab_path VARCHAR(255) NOT NULL,
+  is_distributed SMALLINT NOT NULL DEFAULT 0,
   host_id BIGINT NOT NULL,
-  CONSTRAINT PK_kerberos_principal_host PRIMARY KEY (principal_name, host_id),
+  CONSTRAINT PK_kerberos_principal_host PRIMARY KEY (principal_name, keytab_path, host_id),
  CONSTRAINT FK_krb_pr_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id),
-  CONSTRAINT FK_krb_pr_host_principalname FOREIGN KEY (principal_name) REFERENCES kerberos_principal (principal_name));
+  CONSTRAINT FK_krb_pr_host_principalname FOREIGN KEY (principal_name) REFERENCES kerberos_principal (principal_name),
+  CONSTRAINT FK_krb_pr_host_keytab_path FOREIGN KEY (keytab_path) REFERENCES kerberos_keytab (keytab_path));
 
 CREATE TABLE kerberos_descriptor(
    kerberos_descriptor_name   VARCHAR(255) NOT NULL,
@@ -1214,7 +1211,6 @@ INSERT INTO ambari_sequences (sequence_name, sequence_value) VALUES
   ('remote_cluster_id_seq', 0),
   ('remote_cluster_service_id_seq', 0),
   ('servicecomponent_version_id_seq', 0),
-  ('configuration_id_seq', 0),
   ('hostcomponentdesiredstate_id_seq', 0);
 
 INSERT INTO adminresourcetype (resource_type_id, resource_type_name) VALUES
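
With the kerberos_keytab table in place, distribution state is tracked per (principal, keytab file, host) instead of per (principal, host): the widened primary key lets one principal appear in several keytab files on the same host, each with its own is_distributed flag. A small Python sketch of the relation this composite key models (the principal and paths below are hypothetical examples, not values from this commit):

from collections import namedtuple

# One row of kerberos_principal_host under the new composite key.
PrincipalHost = namedtuple(
    "PrincipalHost",
    ["principal_name", "keytab_path", "host_id", "is_distributed"])

rows = [
    # Same principal, same host, two keytab files; each file's
    # distribution is now tracked independently.
    PrincipalHost("nn/host1@EXAMPLE.COM",
                  "/etc/security/keytabs/nn.service.keytab", 1, True),
    PrincipalHost("nn/host1@EXAMPLE.COM",
                  "/etc/security/keytabs/combined.keytab", 1, False),
]

# Keytabs still waiting to be pushed to their hosts.
pending = [r.keytab_path for r in rows if not r.is_distributed]
print(pending)  # ['/etc/security/keytabs/combined.keytab']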

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
index 994d1e7..333a0c3 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
@@ -96,11 +96,30 @@ CREATE TABLE configuration_base (
   CONSTRAINT PK_configuration_base PRIMARY KEY (id)
 );
 
+CREATE TABLE clusterconfig (
+  config_id NUMERIC(19) NOT NULL,
+  version_tag VARCHAR(255) NOT NULL,
+  version NUMERIC(19) NOT NULL,
+  type_name VARCHAR(255) NOT NULL,
+  cluster_id NUMERIC(19) NOT NULL,
+  stack_id NUMERIC(19) NOT NULL,
+  selected SMALLINT NOT NULL DEFAULT 0,
+  config_data TEXT NOT NULL,
+  config_attributes TEXT,
+  create_timestamp NUMERIC(19) NOT NULL,
+  unmapped SMALLINT NOT NULL DEFAULT 0,
+  selected_timestamp NUMERIC(19) NOT NULL DEFAULT 0,
+  CONSTRAINT PK_clusterconfig PRIMARY KEY (config_id),
+  CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id),
+  CONSTRAINT FK_clusterconfig_stack_id FOREIGN KEY (stack_id) REFERENCES stack(stack_id),
+  CONSTRAINT UQ_config_type_tag UNIQUE (cluster_id, type_name, version_tag),
+  CONSTRAINT UQ_config_type_version UNIQUE (cluster_id, type_name, version));
+
 CREATE TABLE ambari_configuration (
-  id NUMERIC(19) NOT NULL,
-  CONSTRAINT PK_ambari_configuration PRIMARY KEY (id),
-  CONSTRAINT FK_ambari_conf_conf_base FOREIGN KEY (id) REFERENCES configuration_base (id)
-);
+  category_name VARCHAR(100) NOT NULL,
+  property_name VARCHAR(100) NOT NULL,
+  property_value VARCHAR(255) NOT NULL,
+  CONSTRAINT PK_ambari_configuration PRIMARY KEY (category_name, property_name));
 
 CREATE TABLE hosts (
   host_id NUMERIC(19) NOT NULL,
@@ -1014,12 +1033,21 @@ CREATE TABLE kerberos_principal (
   CONSTRAINT PK_kerberos_principal PRIMARY KEY (principal_name)
 );
 
+CREATE TABLE kerberos_keytab (
+  keytab_path VARCHAR(255) NOT NULL,
+  CONSTRAINT PK_krb_keytab_path_host_id PRIMARY KEY (keytab_path)
+);
+
 CREATE TABLE kerberos_principal_host (
   principal_name VARCHAR(255) NOT NULL,
+  keytab_path VARCHAR(255) NOT NULL,
+  is_distributed SMALLINT NOT NULL DEFAULT 0,
   host_id NUMERIC(19) NOT NULL,
-  CONSTRAINT PK_kerberos_principal_host PRIMARY KEY (principal_name, host_id),
+  CONSTRAINT PK_kerberos_principal_host PRIMARY KEY (principal_name, keytab_path, host_id),
  CONSTRAINT FK_krb_pr_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id),
-  CONSTRAINT FK_krb_pr_host_principalname FOREIGN KEY (principal_name) REFERENCES kerberos_principal (principal_name));
+  CONSTRAINT FK_krb_pr_host_principalname FOREIGN KEY (principal_name) REFERENCES kerberos_principal (principal_name),
+  CONSTRAINT FK_krb_pr_host_keytab_path FOREIGN KEY (keytab_path) REFERENCES kerberos_keytab (keytab_path)
+);
 
 CREATE TABLE kerberos_descriptor
 (
@@ -1209,7 +1237,6 @@ INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('remote_clus
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('remote_cluster_service_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('servicecomponent_version_id_seq', 0);
 INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('hostcomponentdesiredstate_id_seq', 0);
-INSERT INTO ambari_sequences(sequence_name, sequence_value) values ('configuration_id_seq', 0);
 
 insert into adminresourcetype (resource_type_id, resource_type_name)
   select 1, 'AMBARI'

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
index a399bed..120bdb6 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
@@ -110,10 +110,30 @@ CREATE TABLE configuration_base (
   CONSTRAINT PK_configuration_base PRIMARY KEY (id)
 );
 
+CREATE TABLE clusterconfig (
+  config_id BIGINT NOT NULL,
+  version_tag VARCHAR(255) NOT NULL,
+  version BIGINT NOT NULL,
+  type_name VARCHAR(255) NOT NULL,
+  cluster_id BIGINT NOT NULL,
+  stack_id BIGINT NOT NULL,
+  selected SMALLINT NOT NULL DEFAULT 0,
+  config_data VARCHAR(MAX) NOT NULL,
+  config_attributes VARCHAR(MAX),
+  create_timestamp BIGINT NOT NULL,
+  unmapped SMALLINT NOT NULL DEFAULT 0,
+  selected_timestamp BIGINT NOT NULL DEFAULT 0,
+  CONSTRAINT PK_clusterconfig PRIMARY KEY CLUSTERED (config_id),
+  CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id),
+  CONSTRAINT FK_clusterconfig_stack_id FOREIGN KEY (stack_id) REFERENCES stack(stack_id),
+  CONSTRAINT UQ_config_type_tag UNIQUE (cluster_id, type_name, version_tag),
+  CONSTRAINT UQ_config_type_version UNIQUE (cluster_id, type_name, version));
+
 CREATE TABLE ambari_configuration (
-  id BIGINT NOT NULL,
-  CONSTRAINT PK_ambari_configuration PRIMARY KEY (id),
-  CONSTRAINT FK_ambari_conf_conf_base FOREIGN KEY (id) REFERENCES configuration_base (id)
+  category_name VARCHAR(100) NOT NULL,
+  property_name VARCHAR(100) NOT NULL,
+  property_value VARCHAR(255) NOT NULL,
+  CONSTRAINT PK_ambari_configuration PRIMARY KEY (category_name, property_name)
 );
 
 CREATE TABLE hosts (
@@ -1037,12 +1057,21 @@ CREATE TABLE kerberos_principal (
   CONSTRAINT PK_kerberos_principal PRIMARY KEY CLUSTERED (principal_name)
 );
 
+CREATE TABLE kerberos_keytab (
+  keytab_path VARCHAR(255) NOT NULL,
+  CONSTRAINT PK_krb_keytab_path_host_id PRIMARY KEY CLUSTERED (keytab_path)
+);
+
 CREATE TABLE kerberos_principal_host (
   principal_name VARCHAR(255) NOT NULL,
+  keytab_path VARCHAR(255) NOT NULL,
+  is_distributed SMALLINT NOT NULL DEFAULT 0,
   host_id BIGINT NOT NULL,
-  CONSTRAINT PK_kerberos_principal_host PRIMARY KEY CLUSTERED (principal_name, host_id),
+  CONSTRAINT PK_kerberos_principal_host PRIMARY KEY CLUSTERED (principal_name, keytab_path, host_id),
  CONSTRAINT FK_krb_pr_host_id FOREIGN KEY (host_id) REFERENCES hosts (host_id),
-  CONSTRAINT FK_krb_pr_host_principalname FOREIGN KEY (principal_name) REFERENCES kerberos_principal (principal_name));
+  CONSTRAINT FK_krb_pr_host_principalname FOREIGN KEY (principal_name) REFERENCES kerberos_principal (principal_name),
+  CONSTRAINT FK_krb_pr_host_keytab_path FOREIGN KEY (keytab_path) REFERENCES kerberos_keytab (keytab_path)
+);
 
 CREATE TABLE kerberos_descriptor
 (
@@ -1234,7 +1263,6 @@ BEGIN TRANSACTION
     ('remote_cluster_id_seq', 0),
     ('remote_cluster_service_id_seq', 0),
     ('servicecomponent_version_id_seq', 0),
-    ('configuration_id_seq', 0),
     ('hostcomponentdesiredstate_id_seq', 0);
 
   insert into adminresourcetype (resource_type_id, resource_type_name)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/META-INF/persistence.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/META-INF/persistence.xml b/ambari-server/src/main/resources/META-INF/persistence.xml
index 680bdea..336dc7a 100644
--- a/ambari-server/src/main/resources/META-INF/persistence.xml
+++ b/ambari-server/src/main/resources/META-INF/persistence.xml
@@ -50,6 +50,7 @@
     <class>org.apache.ambari.server.orm.entities.HostStateEntity</class>
    <class>org.apache.ambari.server.orm.entities.HostVersionEntity</class>
    <class>org.apache.ambari.server.orm.entities.KerberosPrincipalEntity</class>
+    <class>org.apache.ambari.server.orm.entities.KerberosKeytabEntity</class>
    <class>org.apache.ambari.server.orm.entities.KerberosPrincipalHostEntity</class>
     <class>org.apache.ambari.server.orm.entities.KeyValueEntity</class>
     <class>org.apache.ambari.server.orm.entities.MemberEntity</class>
@@ -104,7 +105,6 @@
    <class>org.apache.ambari.server.orm.entities.RemoteAmbariClusterServiceEntity</class>
    <class>org.apache.ambari.server.orm.entities.MpackEntity</class>
    <class>org.apache.ambari.server.orm.entities.RegistryEntity</class>
-    <class>org.apache.ambari.server.orm.entities.ConfigurationBaseEntity</class>
    <class>org.apache.ambari.server.orm.entities.AmbariConfigurationEntity</class>
 
     <properties>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
index 968ceed..31a866e 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
@@ -118,7 +118,7 @@ metadata_stop_script = format("{metadata_bin}/atlas_stop.py")
 log_dir = config['configurations']['atlas-env']['metadata_log_dir']
 
 # service locations
-hadoop_conf_dir = os.path.join(os.environ["HADOOP_HOME"], "conf") if 'HADOOP_HOME' in os.environ else '/etc/hadoop/conf'
+hadoop_conf_dir = os.path.join(os.environ["HADOOP_HOME"], "conf") if 'HADOOP_HOME' in os.environ else format('{stack_root}/current/hadoop-client/conf')
 
 # some commands may need to supply the JAAS location when running as atlas
 atlas_jaas_file = format("{conf_dir}/atlas_jaas.conf")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/params.py
index b01884c..7c1249a 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/params.py
@@ -116,7 +116,7 @@ metadata_stop_script = format("{metadata_bin}/atlas_stop.py")
 log_dir = config['configurations']['atlas-env']['metadata_log_dir']
 
 # service locations
-hadoop_conf_dir = os.path.join(os.environ["HADOOP_HOME"], "conf") if 'HADOOP_HOME' in os.environ else '/etc/hadoop/conf'
+hadoop_conf_dir = os.path.join(os.environ["HADOOP_HOME"], "conf") if 'HADOOP_HOME' in os.environ else format('{stack_root}/current/hadoop-client/conf')
 
 # some commands may need to supply the JAAS location when running as atlas
 atlas_jaas_file = format("{conf_dir}/atlas_jaas.conf")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid.py b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid.py
index ec98c3c..bb872b9 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/druid.py
@@ -115,8 +115,7 @@ def druid(upgrade_type=None, nodeType=None):
          )
     Logger.info(format("Created druid-{node_type_lowercase} jvm.config"))
    # Handling hadoop Lzo jars if enable and node type is hadoop related eg Overlords and MMs
-    if ['middleManager', 'overlord'].__contains__(node_type_lowercase) and params.lzo_enabled and len(
-            params.lzo_packages) > 0:
+    if ['middleManager', 'overlord'].__contains__(node_type_lowercase) and params.lzo_enabled:
         try:
             Logger.info(
                 format(

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
index fd1cde6..519dfbf 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
@@ -18,7 +18,6 @@ limitations under the License.
 
 """
 from ambari_commons import OSCheck
-from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
@@ -27,6 +26,7 @@ from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
 from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.lzo_utils import should_install_lzo
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 
 import status_params
@@ -195,6 +195,5 @@ if has_metric_collector:
 # Create current Hadoop Clients  Libs
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
-lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()
-lzo_packages = get_lzo_packages(stack_version_unformatted)
+lzo_enabled = should_install_lzo()
 hadoop_lib_home = stack_root + '/' + stack_version + '/hadoop/lib'
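
These LZO call sites all funnel into two new helpers in resource_management.libraries.functions.lzo_utils, whose source is not part of this diff. A rough sketch of the shape they plausibly take, reconstructed from the removed inline checks above (the license gate and package name are assumptions for illustration, not code from this commit):

# Hypothetical sketch; the real helpers live in
# resource_management.libraries.functions.lzo_utils.
def should_install_lzo(io_compression_codecs, gpl_license_accepted=True):
    # The inline test the diff removes: LZO is wanted when the configured
    # codec list names com.hadoop.compression.lzo (license gate assumed).
    if io_compression_codecs is None:
        return False
    return ("com.hadoop.compression.lzo" in io_compression_codecs.lower()
            and gpl_license_accepted)

def install_lzo_if_needed(install_package, io_compression_codecs):
    # One place that both decides and installs, replacing the repeated
    # "if params.lzo_enabled: Package(get_lzo_packages(...))" blocks.
    if should_install_lzo(io_compression_codecs):
        install_package("lzo")  # real package names vary per OS family

codecs = "org.apache.hadoop.io.compress.GzipCodec,com.hadoop.compression.lzo.LzoCodec"
print(should_install_lzo(codecs))  # True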

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
index 933515b..7d8fa13 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
@@ -209,12 +209,6 @@ def falcon(type, action = None, upgrade_type=None):
           owner = params.falcon_user,
           create_parents = True)
 
-    # although Falcon's falcon-config.sh will use 'which hadoop' to figure
-    # this out, in an upgraded cluster, it's possible that 'which hadoop'
-    # still points to older binaries; it's safer to just pass in the
-    # hadoop home directory to use
-    environment_dictionary = { "HADOOP_HOME" : params.hadoop_home_dir }
-
    pid = get_user_call_output.get_user_call_output(format("cat {server_pid_file}"), user=params.falcon_user, is_checked_call=False)[1]
     process_exists = format("ls {server_pid_file} && ps -p {pid}")
 
@@ -223,7 +217,6 @@ def falcon(type, action = None, upgrade_type=None):
         Execute(format('{falcon_home}/bin/falcon-config.sh server falcon'),
           user = params.falcon_user,
           path = params.hadoop_bin_dir,
-          environment=environment_dictionary,
           not_if = process_exists,
         )
       except:
@@ -253,7 +246,6 @@ in the Falcon documentation.
         Execute(format('{falcon_home}/bin/falcon-start -port {falcon_port}'),
           user = params.falcon_user,
           path = params.hadoop_bin_dir,
-          environment=environment_dictionary,
           not_if = process_exists,
         )
       except:
@@ -264,8 +256,7 @@ in the Falcon documentation.
       try:
         Execute(format('{falcon_home}/bin/falcon-stop'),
           user = params.falcon_user,
-          path = params.hadoop_bin_dir,
-          environment=environment_dictionary)
+          path = params.hadoop_bin_dir)
       except:
         show_logs(params.falcon_log_dir, params.falcon_user)
         raise

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/service_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/service_advisor.py b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/service_advisor.py
index 5c67a02..e5f4dd0 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/service_advisor.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/service_advisor.py
@@ -461,6 +461,16 @@ class HBASERecommender(service_advisor.ServiceAdvisor):
     else:
       putHbaseSiteProperty('hbase.master.ui.readonly', 'false')
 
+  """
+  Returns the list of Phoenix Query Server host names, or None.
+  """
+  def get_phoenix_query_server_hosts(self, services, hosts):
+    if len(hosts['items']) > 0:
+      phoenix_query_server_hosts = self.getHostsWithComponent("HBASE", "PHOENIX_QUERY_SERVER", services, hosts)
+      if phoenix_query_server_hosts is None:
+        return []
+      return [host['Hosts']['host_name'] for host in phoenix_query_server_hosts]
+
 
  def recommendHBASEConfigurationsFromHDP26(self, configurations, clusterData, services, hosts):
    if 'hbase-env' in services['configurations'] and 'hbase_user' in services['configurations']['hbase-env']['properties']:
@@ -672,4 +682,4 @@ class HBASEValidator(service_advisor.ServiceAdvisor):
                                  " {0} needs to contain {1} instead of {2}".format(prop_name,prop_val,exclude_val))})
 
    validationProblems = self.toConfigurationValidationProblems(validationItems, "hbase-site")
-    return validationProblems
\ No newline at end of file
+    return validationProblems
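
The new get_phoenix_query_server_hosts helper normalizes the advisor's host lookup: a None result from getHostsWithComponent becomes an empty list, while an empty hosts['items'] falls through to an implicit None. A standalone sketch of that normalization with the advisor context mocked out (signature simplified for illustration):

def get_phoenix_query_server_hosts(hosts, component_hosts):
    # component_hosts stands in for self.getHostsWithComponent(
    #     "HBASE", "PHOENIX_QUERY_SERVER", services, hosts)
    if len(hosts['items']) > 0:
        if component_hosts is None:
            return []  # component not deployed: empty list, not None
        return [h['Hosts']['host_name'] for h in component_hosts]
    # no hosts at all: implicit None, mirroring the committed code

print(get_phoenix_query_server_hosts(
    {'items': [object()]},
    [{'Hosts': {'host_name': 'pqs-1.example.com'}}]))  # ['pqs-1.example.com']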

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
index 9979de4..6bbb583 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/metainfo.xml
@@ -244,11 +244,6 @@
             <package>
               <name>hadoop</name>
             </package>
-            <package>
-              <name>hadoop-lzo</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_lzo</condition>
-            </package>
           </packages>
         </osSpecific>
         
@@ -265,16 +260,6 @@
               <name>snappy-devel</name>
             </package>
             <package>
-              <name>lzo</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_lzo</condition>
-            </package>
-            <package>
-              <name>hadoop-lzo-native</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_lzo</condition>
-            </package>
-            <package>
               <name>hadoop-libhdfs</name>
             </package>
             <package>
@@ -296,16 +281,6 @@
               <name>snappy-devel</name>
             </package>
             <package>
-              <name>liblzo2-2</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_lzo</condition>
-            </package>
-            <package>
-              <name>hadoop-lzo-native</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_lzo</condition>
-            </package>
-            <package>
               <name>hadoop-libhdfs</name>
             </package>
           </packages>
@@ -324,11 +299,6 @@
               <name>libsnappy-dev</name>
             </package>
             <package>
-              <name>liblzo2-2</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_lzo</condition>
-            </package>
-            <package>
               <name>hadoop-hdfs</name>
             </package>
             <package>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
index 4022986..d3d0cf8 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
@@ -26,13 +26,13 @@ from resource_management.core.source import Template
 from resource_management.core.resources.service import ServiceConfig
 from resource_management.libraries.resources.xml_config import XmlConfig
 
-from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
 from resource_management.libraries.functions.format import format
 import os
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
+from resource_management.libraries.functions.lzo_utils import install_lzo_if_needed
 
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def hdfs(name=None):
@@ -143,11 +143,7 @@ def hdfs(name=None):
        content=Template("slaves.j2")
   )
   
-  if params.lzo_enabled:
-    lzo_packages = get_lzo_packages(params.stack_version_unformatted)
-    Package(lzo_packages,
-            retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability,
-            retry_count=params.agent_stack_retry_count)
+  install_lzo_if_needed()
       
 def install_snappy():
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py
index 235f231..dc3279f 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/install_params.py
@@ -27,7 +27,3 @@ else:
 
   _config = Script.get_config()
   stack_version_unformatted = str(_config['hostLevelParams']['stack_version'])
-
-  # The logic for LZO also exists in OOZIE's params.py
-  io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
-  lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index bb6349b..e2790e1 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -383,11 +383,6 @@ HdfsResource = functools.partial(
   immutable_paths = get_not_managed_resources(),
   dfs_type = dfs_type
 )
-
-
-# The logic for LZO also exists in OOZIE's params.py
-io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
-lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()
   
 name_node_params = default("/commandParams/namenode", None)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/metainfo.xml
index e6d1166..0c629f3 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/metainfo.xml
@@ -270,11 +270,6 @@
             <package>
               <name>hadoop</name>
             </package>
-            <package>
-              <name>hadoop-lzo</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_lzo</condition>
-            </package>
           </packages>
         </osSpecific>
         
@@ -291,16 +286,6 @@
               <name>snappy-devel</name>
             </package>
             <package>
-              <name>lzo</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_lzo</condition>
-            </package>
-            <package>
-              <name>hadoop-lzo-native</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_lzo</condition>
-            </package>
-            <package>
               <name>hadoop-libhdfs</name>
             </package>
           </packages>
@@ -319,16 +304,6 @@
               <name>snappy-devel</name>
             </package>
             <package>
-              <name>liblzo2-2</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_lzo</condition>
-            </package>
-            <package>
-              <name>hadoop-lzo-native</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_lzo</condition>
-            </package>
-            <package>
               <name>hadoop-libhdfs</name>
             </package>
           </packages>
@@ -347,11 +322,6 @@
               <name>libsnappy-dev</name>
             </package>
             <package>
-              <name>liblzo2-2</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_lzo</condition>
-            </package>
-            <package>
               <name>hadoop-hdfs</name>
             </package>
             <package>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
index 4022986..d3d0cf8 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
@@ -26,13 +26,13 @@ from resource_management.core.source import Template
 from resource_management.core.resources.service import ServiceConfig
 from resource_management.libraries.resources.xml_config import XmlConfig
 
-from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
 from resource_management.libraries.functions.format import format
 import os
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
+from resource_management.libraries.functions.lzo_utils import install_lzo_if_needed
 
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def hdfs(name=None):
@@ -143,11 +143,7 @@ def hdfs(name=None):
        content=Template("slaves.j2")
   )
   
-  if params.lzo_enabled:
-    lzo_packages = get_lzo_packages(params.stack_version_unformatted)
-    Package(lzo_packages,
-            retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability,
-            retry_count=params.agent_stack_retry_count)
+  install_lzo_if_needed()
       
 def install_snappy():
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/install_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/install_params.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/install_params.py
index 235f231..dc3279f 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/install_params.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/install_params.py
@@ -27,7 +27,3 @@ else:
 
   _config = Script.get_config()
   stack_version_unformatted = str(_config['hostLevelParams']['stack_version'])
-
-  # The logic for LZO also exists in OOZIE's params.py
-  io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
-  lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
index 2fa6208..cbe7943 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/params_linux.py
@@ -373,10 +373,6 @@ HdfsResource = functools.partial(
   dfs_type = dfs_type
 )
 
-
-# The logic for LZO also exists in OOZIE's params.py
-io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
-lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()
   
 name_node_params = default("/commandParams/namenode", None)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml
index 762530b..b7af252 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-site.xml
@@ -287,7 +287,7 @@ limitations under the License.
   </property>
   <property>
     <name>hive.auto.convert.sortmerge.join</name>
-    <value>true</value>
+    <value>false</value>
    <description>Will the join be automatically converted to a sort-merge join, if the joined tables pass
       the criteria for sort-merge join.
     </description>
@@ -480,6 +480,12 @@ limitations under the License.
     <value-attributes>
       <empty-value-valid>true</empty-value-valid>
     </value-attributes>
+    <depends-on>
+      <property>
+        <type>beacon-env</type>
+        <name>set_hive_configs</name>
+      </property>
+    </depends-on>
     <on-ambari-upgrade add="false"/>
   </property>
   <property>
@@ -489,6 +495,12 @@ limitations under the License.
     <value-attributes>
       <empty-value-valid>true</empty-value-valid>
     </value-attributes>
+    <depends-on>
+      <property>
+        <type>beacon-env</type>
+        <name>set_hive_configs</name>
+      </property>
+    </depends-on>
     <on-ambari-upgrade add="false"/>
   </property>
   <property>
@@ -498,6 +510,12 @@ limitations under the License.
     <value-attributes>
       <empty-value-valid>true</empty-value-valid>
     </value-attributes>
+    <depends-on>
+      <property>
+        <type>beacon-env</type>
+        <name>set_hive_configs</name>
+      </property>
+    </depends-on>
     <on-ambari-upgrade add="false"/>
   </property>
   <property>
@@ -507,6 +525,16 @@ limitations under the License.
     <value-attributes>
       <empty-value-valid>true</empty-value-valid>
     </value-attributes>
+    <depends-on>
+      <property>
+        <type>beacon-env</type>
+        <name>set_hive_configs</name>
+      </property>
+      <property>
+        <type>hive-site</type>
+        <name>hive.metastore.warehouse.dir</name>
+      </property>
+    </depends-on>
     <on-ambari-upgrade add="false"/>
   </property>
   <property>
@@ -516,6 +544,16 @@ limitations under the License.
     <value-attributes>
       <empty-value-valid>true</empty-value-valid>
     </value-attributes>
+    <depends-on>
+      <property>
+        <type>beacon-env</type>
+        <name>set_hive_configs</name>
+      </property>
+      <property>
+        <type>hive-site</type>
+        <name>hive.metastore.warehouse.dir</name>
+      </property>
+    </depends-on>
     <on-ambari-upgrade add="false"/>
   </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/metainfo.xml
index d84a85d..c2c8189 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/metainfo.xml
@@ -276,11 +276,6 @@
             <package>
               <name>webhcat-tar-pig</name>
             </package>
-            <package>
-              <name>mysql-connector-java</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_mysql_connector</condition>
-            </package>
           </packages>
         </osSpecific>
         <osSpecific>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py
index 3560bf8..6db92b0 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/alerts/alert_hive_thrift_port.py
@@ -28,6 +28,7 @@ from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
 from ambari_commons.os_check import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
+from resource_management.core.signal_utils import TerminateStrategy
 
 OK_MESSAGE = "TCP OK - {0:.3f}s response on port {1}"
 CRITICAL_MESSAGE = "Connection failed on host {0}:{1} ({2})"
@@ -271,7 +272,7 @@ def execute(configurations={}, parameters={}, host_name=None):
 
     start_time = time.time()
     try:
-      Execute(cmd, user=hiveuser, timeout=30)
+      Execute(cmd, user=hiveuser, timeout=30, timeout_kill_strategy=TerminateStrategy.KILL_PROCESS_TREE)
       total_time = time.time() - start_time
       result_code = 'OK'
       label = OK_MESSAGE.format(total_time, port)
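
Passing timeout_kill_strategy=TerminateStrategy.KILL_PROCESS_TREE means a hung beeline invocation is reaped together with any children it spawned, instead of leaving orphans behind after the 30-second timeout. A standalone illustration of the underlying idea using a POSIX process group (not Ambari's actual Execute implementation):

import os
import signal
import subprocess

# Start the command in its own process group so the whole tree
# can be signalled at once; this is the essence of KILL_PROCESS_TREE.
proc = subprocess.Popen(["sh", "-c", "sleep 60 & sleep 60"],
                        preexec_fn=os.setsid)  # POSIX only
try:
    proc.wait(timeout=1)  # stand-in for the alert's 30-second timeout
except subprocess.TimeoutExpired:
    # Kill the group: the shell and its background child die together.
    os.killpg(os.getpgid(proc.pid), signal.SIGKILL)
    proc.wait()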

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/startMetastore.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/startMetastore.sh b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/startMetastore.sh
index 86541f0..5a556b2 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/startMetastore.sh
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/startMetastore.sh
@@ -19,7 +19,7 @@
 # under the License.
 #
 #
-HIVE_BIN=${HIVE_BIN:-"hive"}
+HIVE_CMD=${HIVE_CMD:-"hive"}
 
-HIVE_CONF_DIR=$4 $HIVE_BIN --service metastore -hiveconf hive.log.file=hivemetastore.log -hiveconf hive.log.dir=$5 > $1 2> $2 &
+HIVE_CONF_DIR=$4 $HIVE_CMD --service metastore -hiveconf hive.log.file=hivemetastore.log -hiveconf hive.log.dir=$5 > $1 2> $2 &
 echo $!|cat>$3
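
The script now reads its launcher from HIVE_CMD, defaulting to plain "hive" on the PATH, and hive_service.py below passes the fully qualified params.hive_cmd, removing the HADOOP_HOME/HIVE_BIN indirection. A small Python illustration of that environment handoff (the launcher path is a hypothetical example):

import os
import subprocess

# The caller (cf. hive_service.py below) supplies the absolute launcher
# path; the shell script falls back to "hive" when the variable is absent.
env = dict(os.environ, HIVE_CMD="/usr/hdp/current/hive-client/bin/hive")
subprocess.call(
    ["sh", "-c", 'echo "would run: ${HIVE_CMD:-hive} --service metastore"'],
    env=env)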

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index c4b34a5..d9cc55f 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -24,7 +24,7 @@ from urlparse import urlparse
 
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
+from resource_management.libraries.functions import copy_tarball
 from resource_management.libraries.functions.get_config import get_config
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -42,6 +42,7 @@ from resource_management.core.logger import Logger
 from resource_management.core import utils
 from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster, setup_atlas_hook
 from resource_management.libraries.functions.security_commons import update_credential_provider_path
+from resource_management.libraries.functions.lzo_utils import install_lzo_if_needed
 from ambari_commons.constants import SERVICE
 
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -50,6 +51,8 @@ from ambari_commons import OSConst
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def hive(name=None):
   import params
+  
+  install_lzo_if_needed()
 
  hive_client_conf_path = format("{stack_root}/current/{component_directory}/conf")
   # Permissions 644 for conf dir (client) files, and 600 for conf.server
@@ -174,19 +177,19 @@ def setup_hiveserver2():
   # *********************************
  #  if copy tarball to HDFS feature  supported copy mapreduce.tar.gz and tez.tar.gz to HDFS
  if params.stack_version_formatted_major and check_stack_feature(StackFeature.COPY_TARBALL_TO_HDFS, params.stack_version_formatted_major):
-    copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)
-    copy_to_hdfs("tez", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)
+    copy_tarball.copy_to_hdfs("mapreduce", params.user_group, 
params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
+    copy_tarball.copy_to_hdfs("tez", params.user_group, params.hdfs_user, 
skip=params.sysprep_skip_copy_tarballs_hdfs)
 
   # Always copy pig.tar.gz and hive.tar.gz using the appropriate mode.
   # This can use a different source and dest location to account
-  copy_to_hdfs("pig",
+  copy_tarball.copy_to_hdfs("pig",
                params.user_group,
                params.hdfs_user,
                file_mode=params.tarballs_mode,
                custom_source_file=params.pig_tar_source,
                custom_dest_file=params.pig_tar_dest_file,
                skip=params.sysprep_skip_copy_tarballs_hdfs)
-  copy_to_hdfs("hive",
+  copy_tarball.copy_to_hdfs("hive",
                params.user_group,
                params.hdfs_user,
                file_mode=params.tarballs_mode,
@@ -207,7 +210,7 @@ def setup_hiveserver2():
       src_filename = os.path.basename(source_file)
       dest_file = os.path.join(dest_dir, src_filename)
 
-      copy_to_hdfs(tarball_name,
+      copy_tarball.copy_to_hdfs(tarball_name,
                    params.user_group,
                    params.hdfs_user,
                    file_mode=params.tarballs_mode,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
index c0b152e..caa3e9b 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
@@ -415,13 +415,14 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
 
 
     """
-    Remove extra lines from 'llapstatus' status output (eg: because of MOTD logging) so as to have a valid JSON data to be passed in
-    to JSON converter.
+    Remove extra lines (beginning/end) from 'llapstatus' status output (eg: because of MOTD logging) so as to have
+    a valid JSON data to be passed in to JSON converter.
     """
     def _make_valid_json(self, output):
       '''
 
-      Note: It is assumed right now that extra lines will be only at the start and not at the end.
+      Note: Extra lines (eg: because of MOTD) may be at the start or the end (some other logging getting appended)
+      of the passed-in data.
 
       Sample expected JSON to be passed for 'loads' is either of the form :
 
@@ -457,6 +458,19 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
       if (len_splits < 3):
         raise Fail ("Malformed JSON data received from 'llapstatus' command. 
Exiting ....")
 
+      # Firstly, remove extra lines from the END.
+      updated_splits = []
+      for itr, line in enumerate(reversed(splits)):
+        if line == "}": # Our assumption of end of JSON data.
+          updated_splits = splits[:-itr]
+          break
+
+      if len(updated_splits) > 0:
+        splits = updated_splits
+        len_splits = len(splits)
+
+
+      # Secondly, remove extra lines from the BEGINNING.
       marker_idx = None # To detect where from to start reading for JSON data
       for idx, split in enumerate(splits):
         curr_elem = split.strip()
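
The updated _make_valid_json now trims noise from both ends of the llapstatus output: one backward scan drops trailing lines after the closing brace, then the marker-index pass the hunk tail refers to drops leading lines before the JSON starts. A self-contained sketch of that two-pass trim (simplified; the real method also checks line counts and raises Fail on malformed input):

import json

def make_valid_json(output):
    """Strip MOTD/log noise before the first '{' and after the last '}'."""
    splits = output.splitlines()

    # Pass 1: walk backwards to the closing brace, dropping trailing noise.
    for itr, line in enumerate(reversed(splits)):
        if line == "}":  # assumed end of the JSON payload
            if itr:
                splits = splits[:-itr]
            break

    # Pass 2: walk forwards to the opening brace, dropping leading noise.
    for idx, line in enumerate(splits):
        if line.strip() == "{":
            splits = splits[idx:]
            break

    return json.loads("\n".join(splits))

noisy = 'MOTD banner\n{\n  "state" : "RUNNING_ALL"\n}\nshutdown hook log'
print(make_valid_json(noisy))  # {'state': 'RUNNING_ALL'}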

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
index 2412bf9..05aedc1 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py
@@ -76,8 +76,6 @@ def hive_service(name, action='start', upgrade_type=None):
       check_fs_root(params.hive_server_conf_dir, params.execute_path)
 
     daemon_cmd = cmd
-    hadoop_home = params.hadoop_home
-    hive_bin = "hive"
 
     # upgrading hiveserver2 (rolling_restart) means that there is an existing,
     # de-registering hiveserver2; the pid will still exist, but the new
@@ -85,13 +83,9 @@ def hive_service(name, action='start', upgrade_type=None):
     if upgrade_type == UPGRADE_TYPE_ROLLING:
       process_id_exists_command = None
 
-      if params.version and params.stack_root:
-        hadoop_home = format("{stack_root}/{version}/hadoop")
-        hive_bin = os.path.join(params.hive_bin, hive_bin)
-      
-    Execute(daemon_cmd, 
+    Execute(daemon_cmd,
       user = params.hive_user,
-      environment = { 'HADOOP_HOME': hadoop_home, 'JAVA_HOME': params.java64_home, 'HIVE_BIN': hive_bin },
+      environment = { 'JAVA_HOME': params.java64_home, 'HIVE_CMD': params.hive_cmd },
       path = params.execute_path,
       not_if = process_id_exists_command)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service_interactive.py
index 703d104..71c22d7 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service_interactive.py
@@ -52,12 +52,11 @@ def hive_service_interactive(name, action='start', upgrade_type=None):
   if action == 'start':
    check_fs_root(params.hive_server_interactive_conf_dir, params.execute_path_hive_interactive)
     daemon_cmd = cmd
-    hadoop_home = params.hadoop_home
-    hive_interactive_bin = "hive2"
+    hive_interactive_bin = format("{stack_root}/current/hive-server2-hive2/bin/hive2")
 
     Execute(daemon_cmd,
             user = params.hive_user,
-            environment = { 'HADOOP_HOME': hadoop_home, 'JAVA_HOME': params.java64_home, 'HIVE_BIN': hive_interactive_bin },
+            environment = { 'JAVA_HOME': params.java64_home, 'HIVE_BIN': hive_interactive_bin },
             path = params.execute_path,
             not_if = process_id_exists_command)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 16e1a71..ea8beaf 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -112,6 +112,7 @@ component_directory_interactive = status_params.component_directory_interactive
 hadoop_home = stack_select.get_hadoop_dir("home")
 
 hive_bin = format('{stack_root}/current/{component_directory}/bin')
+hive_cmd = os.path.join(hive_bin, "hive")
 hive_schematool_ver_bin = format('{stack_root}/{version}/hive/bin')
 hive_schematool_bin = format('{stack_root}/current/{component_directory}/bin')
 hive_lib = format('{stack_root}/current/{component_directory}/lib')
@@ -186,8 +187,6 @@ hive_server_conf_dir = status_params.hive_server_conf_dir
 
 hcat_conf_dir = '/etc/hive-hcatalog/conf'
 config_dir = '/etc/hive-webhcat/conf'
-hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
-webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
 
 # there are no client versions of these, use server versions directly
 hcat_lib = format('{stack_root}/current/hive-webhcat/share/hcatalog')

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
index 271fff9..852d7bf 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/service_check.py
@@ -171,10 +171,8 @@ class HiveServiceCheckDefault(HiveServiceCheck):
     if kinit_cmd:
       beeline_url.append('principal={key}')
 
-    exec_path = params.execute_path
-    if params.version:
-      upgrade_hive_bin = format("{stack_root}/{version}/hive2/bin")
-      exec_path =  os.environ['PATH'] + os.pathsep + params.hadoop_bin_dir + os.pathsep + upgrade_hive_bin
+    hive_interactive_bin = format("{stack_root}/current/hive-server2-hive2/bin")
+    exec_path =  os.environ['PATH'] + os.pathsep + params.hadoop_bin_dir + os.pathsep + hive_interactive_bin
 
     # beeline path
     llap_cmd = "! beeline -u '%s'" % format(";".join(beeline_url))

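The rewrite above drops the upgrade-only branch: the service check now always appends the `current` hive2 bin directory to PATH instead of a version-pinned one. The construction is plain PATH arithmetic:

    import os

    # Hypothetical stand-ins for params.hadoop_bin_dir and the hive2 bin dir.
    hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin'
    hive_interactive_bin = '/usr/hdp/current/hive-server2-hive2/bin'

    # os.pathsep is ':' on Linux, so beeline can resolve from any segment.
    exec_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + hive_interactive_bin
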
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
index cb4aafd..bddb5b2 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service.py
@@ -40,30 +40,22 @@ def webhcat_service(action='start', rolling_restart=False):
 def webhcat_service(action='start', upgrade_type=None):
   import params
 
-  environ = {
-    'HADOOP_HOME': params.hadoop_home
-  }
-
   cmd = format('{webhcat_bin_dir}/webhcat_server.sh')
 
   if action == 'start':
-    if upgrade_type is not None and params.version and params.stack_root:
-      environ['HADOOP_HOME'] = format("{stack_root}/{version}/hadoop")
-
     daemon_cmd = format('cd {hcat_pid_dir} ; {cmd} start')
    no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps -p `cat {webhcat_pid_file}` >/dev/null 2>&1')
     try:
       Execute(daemon_cmd,
               user=params.webhcat_user,
-              not_if=no_op_test,
-              environment = environ)
+              not_if=no_op_test)
     except:
       show_logs(params.hcat_log_dir, params.webhcat_user)
       raise
   elif action == 'stop':
     try:
       # try stopping WebHCat using its own script
-      graceful_stop(cmd, environ)
+      graceful_stop(cmd)
     except Fail:
       show_logs(params.hcat_log_dir, params.webhcat_user)
       Logger.info(traceback.format_exc())
@@ -95,17 +87,14 @@ def webhcat_service(action='start', upgrade_type=None):
 
     File(params.webhcat_pid_file, action="delete")
 
-def graceful_stop(cmd, environ):
+def graceful_stop(cmd):
   """
  Attempts to stop WebHCat using its own shell script. On some versions this may not correctly
  stop the daemon.
   :param cmd: the command to run to stop the daemon
-  :param environ: the environment variables to execute the command with
   :return:
   """
   import params
   daemon_cmd = format('{cmd} stop')
 
-  Execute(daemon_cmd,
-          user = params.webhcat_user,
-          environment = environ)
+  Execute(daemon_cmd, user = params.webhcat_user)

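With the `environ` plumbing gone, webhcat_server.sh is expected to establish HADOOP_HOME itself. The `no_op_test` guard that keeps `start` idempotent is just a pid-file liveness probe; a Python equivalent of the shell test, for reference:

    import os

    def webhcat_is_running(pid_file):
        # Mirrors: ls PIDFILE >/dev/null && ps -p `cat PIDFILE` >/dev/null
        try:
            with open(pid_file) as f:
                pid = int(f.read().strip())
            os.kill(pid, 0)  # signal 0 checks existence without delivering anything
            return True
        except (IOError, ValueError, OSError):
            return False
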
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/metainfo.xml
index db490c9..48dda31 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/metainfo.xml
@@ -369,16 +369,6 @@
 
       <osSpecifics>
         <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>mysql-connector-java</name>
-              <skipUpgrade>true</skipUpgrade>
-              <condition>should_install_mysql_connector</condition>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
           <osFamily>redhat7,amazon2015,redhat6,suse11,suse12</osFamily>
           <packages>
             <package>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/files/startMetastore.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/files/startMetastore.sh b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/files/startMetastore.sh
index 86541f0..5a556b2 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/files/startMetastore.sh
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/files/startMetastore.sh
@@ -19,7 +19,7 @@
 # under the License.
 #
 #
-HIVE_BIN=${HIVE_BIN:-"hive"}
+HIVE_CMD=${HIVE_CMD:-"hive"}
 
-HIVE_CONF_DIR=$4 $HIVE_BIN --service metastore -hiveconf hive.log.file=hivemetastore.log -hiveconf hive.log.dir=$5 > $1 2> $2 &
+HIVE_CONF_DIR=$4 $HIVE_CMD --service metastore -hiveconf hive.log.file=hivemetastore.log -hiveconf hive.log.dir=$5 > $1 2> $2 &
 echo $!|cat>$3

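The HIVE_BIN -> HIVE_CMD rename keeps the launcher in sync with hive_service.py below, which now exports HIVE_CMD as an absolute path. The shell default `${HIVE_CMD:-"hive"}` falls back to a bare `hive` on PATH when the variable is unset or empty; the same lookup expressed in Python:

    import os

    # Equivalent of the shell's ${HIVE_CMD:-"hive"} default expansion:
    # an unset *or empty* HIVE_CMD falls back to 'hive'.
    hive_cmd = os.environ.get('HIVE_CMD') or 'hive'
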
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive.py
index 22ff9fd..1724bae 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive.py
@@ -42,6 +42,7 @@ from resource_management.core.logger import Logger
 from resource_management.core import utils
 from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster, setup_atlas_hook
 from resource_management.libraries.functions.security_commons import update_credential_provider_path
+from resource_management.libraries.functions.lzo_utils import install_lzo_if_needed
 from ambari_commons.constants import SERVICE
 
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -50,6 +51,8 @@ from ambari_commons import OSConst
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def hive(name=None):
   import params
+  
+  install_lzo_if_needed()
 
  hive_client_conf_path = format("{stack_root}/current/{component_directory}/conf")
   # Permissions 644 for conf dir (client) files, and 600 for conf.server

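install_lzo_if_needed() now runs before any Hive configuration is written. Its body is not part of this commit; judging from the should_install_lzo helper imported in the Oozie change further below, a plausible shape is roughly the following sketch (entirely an assumption, not the real lzo_utils code):

    # Sketch only: the real helper lives in
    # resource_management.libraries.functions.lzo_utils and is not shown here.
    def install_lzo_if_needed_sketch(should_install_lzo, install_packages, lzo_package_names):
        """Install the stack's LZO packages when the cluster opts in."""
        if should_install_lzo():                 # e.g. driven by core-site codec settings
            install_packages(lzo_package_names)  # expected to be a no-op when already present
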
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service.py
index 1f2b644..80471ea 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service.py
@@ -76,8 +76,6 @@ def hive_service(name, action='start', upgrade_type=None):
       check_fs_root(params.hive_server_conf_dir, params.execute_path)
 
     daemon_cmd = cmd
-    hadoop_home = params.hadoop_home
-    hive_bin = "hive"
 
     # upgrading hiveserver2 (rolling_restart) means that there is an existing,
     # de-registering hiveserver2; the pid will still exist, but the new
@@ -85,13 +83,9 @@ def hive_service(name, action='start', upgrade_type=None):
     if upgrade_type == UPGRADE_TYPE_ROLLING:
       process_id_exists_command = None
 
-      if params.version and params.stack_root:
-        hadoop_home = format("{stack_root}/{version}/hadoop")
-        hive_bin = os.path.join(params.hive_bin, hive_bin)
-      
     Execute(daemon_cmd, 
       user = params.hive_user,
-      environment = { 'HADOOP_HOME': hadoop_home, 'JAVA_HOME': params.java64_home, 'HIVE_BIN': hive_bin },
+      environment = { 'JAVA_HOME': params.java64_home, 'HIVE_CMD': params.hive_cmd },
       path = params.execute_path,
       not_if = process_id_exists_command)
 

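Tying this to the startMetastore.sh change above: the Python side now exports an absolute HIVE_CMD and the launcher consumes it, while HADOOP_HOME is left for the hive scripts to derive on their own. A compact end-to-end illustration (the exported paths are hypothetical):

    import os
    import subprocess

    env = dict(os.environ)
    env.update({
        'JAVA_HOME': '/usr/jdk64/jdk1.8.0_112',               # params.java64_home
        'HIVE_CMD': '/usr/hdp/current/hive-client/bin/hive',  # params.hive_cmd
    })
    # startMetastore.sh then expands ${HIVE_CMD:-"hive"} to the absolute path.
    subprocess.call('echo "would launch: ${HIVE_CMD:-hive}"', shell=True, env=env)
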
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service_interactive.py
index 703d104..71c22d7 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_service_interactive.py
@@ -52,12 +52,11 @@ def hive_service_interactive(name, action='start', upgrade_type=None):
  if action == 'start':
    check_fs_root(params.hive_server_interactive_conf_dir, params.execute_path_hive_interactive)
    daemon_cmd = cmd
-    hadoop_home = params.hadoop_home
-    hive_interactive_bin = "hive2"
+    hive_interactive_bin = format("{stack_root}/current/hive-server2-hive2/bin/hive2")
 
    Execute(daemon_cmd,
            user = params.hive_user,
-            environment = { 'HADOOP_HOME': hadoop_home, 'JAVA_HOME': params.java64_home, 'HIVE_BIN': hive_interactive_bin },
+            environment = { 'JAVA_HOME': params.java64_home, 'HIVE_BIN': hive_interactive_bin },
            path = params.execute_path,
            not_if = process_id_exists_command)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
index fb2c84a..088a540 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/params_linux.py
@@ -36,19 +36,17 @@ from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from resource_management.libraries.functions import upgrade_summary
 from resource_management.libraries.functions.get_port_from_url import get_port_from_url
 from resource_management.libraries.functions.expect import expect
 from resource_management.libraries import functions
-from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster
-from ambari_commons.ambari_metrics_helper import select_metric_collector_hosts_from_hostnames
 from resource_management.libraries.functions.setup_ranger_plugin_xml import get_audit_configs, generate_ranger_service_config
 from resource_management.libraries.functions.get_architecture import get_architecture
 
 from resource_management.core.utils import PasswordString
-from resource_management.core.shell import checked_call
 from resource_management.core.exceptions import Fail
 from ambari_commons.credential_store_helper import get_password_from_credential_store
 
@@ -107,8 +105,10 @@ stack_supports_hive_interactive_ga = check_stack_feature(StackFeature.HIVE_INTER
 component_directory = status_params.component_directory
 component_directory_interactive = status_params.component_directory_interactive
 
-hadoop_home = format('{stack_root}/current/hadoop-client')
+hadoop_home = stack_select.get_hadoop_dir("home")
+
 hive_bin = format('{stack_root}/current/{component_directory}/bin')
+hive_cmd = os.path.join(hive_bin, "hive")
 hive_schematool_ver_bin = format('{stack_root}/{version}/hive/bin')
 hive_schematool_bin = format('{stack_root}/current/{component_directory}/bin')
 hive_lib = format('{stack_root}/current/{component_directory}/lib')
@@ -183,8 +183,6 @@ hive_server_conf_dir = status_params.hive_server_conf_dir
 
 hcat_conf_dir = '/etc/hive-hcatalog/conf'
 config_dir = '/etc/hive-webhcat/conf'
-hcat_lib = '/usr/lib/hive-hcatalog/share/hcatalog'
-webhcat_bin_dir = '/usr/lib/hive-hcatalog/sbin'
 
 # there are no client versions of these, use server versions directly
 hcat_lib = format('{stack_root}/current/hive-webhcat/share/hcatalog')

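Two cleanups land in this file: hadoop_home is now resolved through stack_select.get_hadoop_dir("home"), which follows the stack's *-select symlinks instead of templating a literal hadoop-client path, and the `/usr/lib/hive-hcatalog` assignments were deleted because they were dead code, immediately shadowed by the format()-based assignments that follow them. Usage of the resolver, for reference:

    from resource_management.libraries.functions import stack_select

    # Delegates to the stack tooling (hdp-select and friends); on an HDP
    # host this typically resolves to /usr/hdp/current/hadoop-client.
    hadoop_home = stack_select.get_hadoop_dir("home")
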
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/service_check.py
index d144c34..ce434b6 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/service_check.py
@@ -170,10 +170,8 @@ class HiveServiceCheckDefault(HiveServiceCheck):
     if kinit_cmd:
       beeline_url.append('principal={key}')
 
-    exec_path = params.execute_path
-    if params.version:
-      upgrade_hive_bin = format("{stack_root}/{version}/hive2/bin")
-      exec_path =  os.environ['PATH'] + os.pathsep + params.hadoop_bin_dir + os.pathsep + upgrade_hive_bin
+    hive_interactive_bin = format("{stack_root}/current/hive-server2-hive2/bin")
+    exec_path =  os.environ['PATH'] + os.pathsep + params.hadoop_bin_dir + os.pathsep + hive_interactive_bin
 
     # beeline path
     llap_cmd = "! beeline -u '%s'" % format(";".join(beeline_url))

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/webhcat_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/webhcat_service.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/webhcat_service.py
index cb4aafd..bddb5b2 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/webhcat_service.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/webhcat_service.py
@@ -40,30 +40,22 @@ def webhcat_service(action='start', rolling_restart=False):
 def webhcat_service(action='start', upgrade_type=None):
   import params
 
-  environ = {
-    'HADOOP_HOME': params.hadoop_home
-  }
-
   cmd = format('{webhcat_bin_dir}/webhcat_server.sh')
 
   if action == 'start':
-    if upgrade_type is not None and params.version and params.stack_root:
-      environ['HADOOP_HOME'] = format("{stack_root}/{version}/hadoop")
-
     daemon_cmd = format('cd {hcat_pid_dir} ; {cmd} start')
    no_op_test = format('ls {webhcat_pid_file} >/dev/null 2>&1 && ps -p `cat {webhcat_pid_file}` >/dev/null 2>&1')
     try:
       Execute(daemon_cmd,
               user=params.webhcat_user,
-              not_if=no_op_test,
-              environment = environ)
+              not_if=no_op_test)
     except:
       show_logs(params.hcat_log_dir, params.webhcat_user)
       raise
   elif action == 'stop':
     try:
       # try stopping WebHCat using its own script
-      graceful_stop(cmd, environ)
+      graceful_stop(cmd)
     except Fail:
       show_logs(params.hcat_log_dir, params.webhcat_user)
       Logger.info(traceback.format_exc())
@@ -95,17 +87,14 @@ def webhcat_service(action='start', upgrade_type=None):
 
     File(params.webhcat_pid_file, action="delete")
 
-def graceful_stop(cmd, environ):
+def graceful_stop(cmd):
   """
  Attempts to stop WebHCat using its own shell script. On some versions this may not correctly
  stop the daemon.
   :param cmd: the command to run to stop the daemon
-  :param environ: the environment variables to execute the command with
   :return:
   """
   import params
   daemon_cmd = format('{cmd} stop')
 
-  Execute(daemon_cmd,
-          user = params.webhcat_user,
-          environment = environ)
+  Execute(daemon_cmd, user = params.webhcat_user)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
index 0a08121..293bcf8 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/configuration/kerberos-env.xml
@@ -349,8 +349,8 @@
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
-    <name>group</name>
-    <display-name>IPA Group</display-name>
+    <name>ipa_user_group</name>
+    <display-name>IPA User Group</display-name>
     <description>
      The group that IPA user principals should be a member of
     </description>
@@ -362,38 +362,6 @@
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
-    <name>set_password_expiry</name>
-    <display-name>Set IPA principal password expiry</display-name>
-    <description>
-      Indicates whether Ambari should set the password expiry for the principals it creates. By default
-      IPA does not allow this. It requires write permission of the admin principal to the krbPasswordExpiry
-      attribute. If set IPA principal password expiry is not true it is assumed that a suitable password
-      policy is in place for the IPA Group principals are added to.
-    </description>
-    <value>false</value>
-    <value-attributes>
-      <type>boolean</type>
-      <overridable>false</overridable>
-      <visible>false</visible>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>password_chat_timeout</name>
-    <display-name>Set IPA kinit password chat timeout</display-name>
-    <description>
-      Indicates the timeout in seconds that Ambari should wait for a response 
during a password chat. This is
-      because it can take some time due to lookups before a response is there.
-    </description>
-    <value>5</value>
-    <value-attributes>
-      <visible>false</visible>
-      <type>int</type>
-      <overridable>false</overridable>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
     <name>preconfigure_services</name>
     <display-name>Pre-configure services</display-name>
     <description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_common.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_common.py b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_common.py
index 21accdd..fcaa547 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_common.py
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-10/package/scripts/kerberos_common.py
@@ -441,10 +441,9 @@ class KerberosScript(Script):
           if principal is not None:
             curr_content = Script.structuredOut
 
-            if "keytabs" not in curr_content:
-              curr_content['keytabs'] = {}
-
-            curr_content['keytabs'][principal.replace("_HOST", params.hostname)] = '_REMOVED_'
+            if "removedKeytabs" not in curr_content:
+              curr_content['removedKeytabs'] = {}
+            curr_content['removedKeytabs'][principal.replace("_HOST", params.hostname)] = keytab_file_path
 
             self.put_structured_out(curr_content)
 

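The structured-output change replaces the '_REMOVED_' sentinel under 'keytabs' with the actual file path under 'removedKeytabs', so the server learns which keytab vanished, not merely that one did. The reported dict ends up shaped like this (the principal and path here are hypothetical examples):

    # Illustrative structured output after a keytab removal; _HOST in the
    # principal has already been expanded with the agent's hostname.
    curr_content = {}
    if "removedKeytabs" not in curr_content:
        curr_content["removedKeytabs"] = {}
    curr_content["removedKeytabs"]["nn/c6401.ambari.apache.org@EXAMPLE.COM"] = \
        "/etc/security/keytabs/nn.service.keytab"
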
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-30/configuration/kerberos-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-30/configuration/kerberos-env.xml b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-30/configuration/kerberos-env.xml
index 0a08121..293bcf8 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-30/configuration/kerberos-env.xml
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-30/configuration/kerberos-env.xml
@@ -349,8 +349,8 @@
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
-    <name>group</name>
-    <display-name>IPA Group</display-name>
+    <name>ipa_user_group</name>
+    <display-name>IPA User Group</display-name>
     <description>
      The group that IPA user principals should be a member of
     </description>
@@ -362,38 +362,6 @@
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
-    <name>set_password_expiry</name>
-    <display-name>Set IPA principal password expiry</display-name>
-    <description>
-      Indicates whether Ambari should set the password expiry for the principals it creates. By default
-      IPA does not allow this. It requires write permission of the admin principal to the krbPasswordExpiry
-      attribute. If set IPA principal password expiry is not true it is assumed that a suitable password
-      policy is in place for the IPA Group principals are added to.
-    </description>
-    <value>false</value>
-    <value-attributes>
-      <type>boolean</type>
-      <overridable>false</overridable>
-      <visible>false</visible>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>password_chat_timeout</name>
-    <display-name>Set IPA kinit password chat timeout</display-name>
-    <description>
-      Indicates the timeout in seconds that Ambari should wait for a response 
during a password chat. This is
-      because it can take some time due to lookups before a response is there.
-    </description>
-    <value>5</value>
-    <value-attributes>
-      <visible>false</visible>
-      <type>int</type>
-      <overridable>false</overridable>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
     <name>preconfigure_services</name>
     <display-name>Pre-configure services</display-name>
     <description>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-30/package/scripts/kerberos_common.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-30/package/scripts/kerberos_common.py b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-30/package/scripts/kerberos_common.py
index 21accdd..fcaa547 100644
--- a/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-30/package/scripts/kerberos_common.py
+++ b/ambari-server/src/main/resources/common-services/KERBEROS/1.10.3-30/package/scripts/kerberos_common.py
@@ -441,10 +441,9 @@ class KerberosScript(Script):
           if principal is not None:
             curr_content = Script.structuredOut
 
-            if "keytabs" not in curr_content:
-              curr_content['keytabs'] = {}
-
-            curr_content['keytabs'][principal.replace("_HOST", params.hostname)] = '_REMOVED_'
+            if "removedKeytabs" not in curr_content:
+              curr_content['removedKeytabs'] = {}
+            curr_content['removedKeytabs'][principal.replace("_HOST", params.hostname)] = keytab_file_path
 
             self.put_structured_out(curr_content)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
index c1151fc..b15d158 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
@@ -71,8 +71,7 @@ class MahoutServiceCheck(Script):
     Execute( mahout_command,
              tries = 3,
              try_sleep = 5,
-             environment={'HADOOP_HOME': params.hadoop_home,'HADOOP_CONF_DIR': params.hadoop_conf_dir,
-                          'MAHOUT_HOME': params.mahout_home,'JAVA_HOME': params.java64_home},
+             environment={'MAHOUT_HOME': params.mahout_home,'JAVA_HOME': params.java64_home},
              path = format('/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'),
              user = params.smokeuser
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
index f215a1e..29813d3 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
@@ -35,9 +35,9 @@ from resource_management.libraries.functions.stack_features import check_stack_f
 from resource_management.libraries.functions.oozie_prepare_war import prepare_war
 from resource_management.libraries.functions.copy_tarball import get_current_version
 from resource_management.libraries.resources.xml_config import XmlConfig
+from resource_management.libraries.functions.lzo_utils import install_lzo_if_needed
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.security_commons import update_credential_provider_path
-from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 from resource_management.core.resources.packaging import Package
 from resource_management.core.shell import as_user, as_sudo, call, checked_call
 from resource_management.core.exceptions import Fail
@@ -190,6 +190,11 @@ def oozie(is_server=False, upgrade_type=None):
 
   oozie_ownership()
   
+  if params.lzo_enabled:
+    install_lzo_if_needed()
+    Execute(format('{sudo} cp {hadoop_lib_home}/hadoop-lzo*.jar {oozie_lib_dir}'),
+    )
+  
   if is_server:
     oozie_server_specific(upgrade_type)
   
@@ -238,14 +243,14 @@ def get_oozie_ext_zip_source_paths(upgrade_type, params):
 
 def oozie_server_specific(upgrade_type):
   import params
-  
+
  no_op_test = as_user(format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1"), user=params.oozie_user)
-  
+
   File(params.pid_file,
     action="delete",
     not_if=no_op_test
   )
-  
+
  oozie_server_directories = [format("{oozie_home}/{oozie_tmp_dir}"), params.oozie_pid_dir, params.oozie_log_dir, params.oozie_tmp_dir, params.oozie_data_dir, params.oozie_lib_dir, params.oozie_webapps_dir, params.oozie_webapps_conf_dir, params.oozie_server_dir]
   Directory( oozie_server_directories,
     owner = params.oozie_user,
@@ -254,25 +259,25 @@ def oozie_server_specific(upgrade_type):
     create_parents = True,
     cd_access="a",
   )
-  
+
   Directory(params.oozie_libext_dir,
             create_parents = True,
   )
-  
+
   hashcode_file = format("{oozie_home}/.hashcode")
  skip_recreate_sharelib = format("test -f {hashcode_file} && test -d {oozie_home}/share")
 
  untar_sharelib = ('tar','-xvf',format('{oozie_home}/oozie-sharelib.tar.gz'),'-C',params.oozie_home)
 
   Execute( untar_sharelib,    # time-expensive
-    not_if  = format("{no_op_test} || {skip_recreate_sharelib}"), 
+    not_if  = format("{no_op_test} || {skip_recreate_sharelib}"),
     sudo = True,
   )
 
   configure_cmds = []
   # Default to /usr/share/$TARGETSTACK-oozie/ext-2.2.zip as the first path
   source_ext_zip_paths = get_oozie_ext_zip_source_paths(upgrade_type, params)
-  
+
   # Copy the first oozie ext-2.2.zip file that is found.
  # This uses a list to handle the cases when migrating from some versions of BigInsights to HDP.
   if source_ext_zip_paths is not None:
@@ -286,8 +291,8 @@ def oozie_server_specific(upgrade_type):
                 sudo=True,
                 )
         break
-  
-  
+
+
   Directory(params.oozie_webapps_conf_dir,
             owner = params.oozie_user,
             group = params.user_group,
@@ -306,15 +311,6 @@ def oozie_server_specific(upgrade_type):
    Execute(format('{sudo} chown {oozie_user}:{user_group} {oozie_libext_dir}/falcon-oozie-el-extension-*.jar'),
       not_if  = no_op_test)
 
-  if params.lzo_enabled:
-    all_lzo_packages = get_lzo_packages(params.stack_version_unformatted)
-    Package(all_lzo_packages,
-            retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability,
-            retry_count=params.agent_stack_retry_count)
-    Execute(format('{sudo} cp {hadoop_lib_home}/hadoop-lzo*.jar {oozie_lib_dir}'),
-      not_if  = no_op_test,
-    )
-
   prepare_war(params)
 
   File(hashcode_file,
@@ -365,7 +361,7 @@ def oozie_server_specific(upgrade_type):
   Directory(params.oozie_server_dir,
     owner = params.oozie_user,
     group = params.user_group,
-    recursive_ownership = True,  
+    recursive_ownership = True,
   )
   if params.security_enabled:
     File(os.path.join(params.conf_dir, 'zkmigrator_jaas.conf'),
@@ -410,7 +406,7 @@ def copy_atlas_hive_hook_to_dfs_share_lib(upgrade_type=None, upgrade_direction=N
  effective_version = params.stack_version_formatted if upgrade_type is None else format_stack_version(params.version)
  if not check_stack_feature(StackFeature.ATLAS_HOOK_SUPPORT, effective_version):
     return
-    
+
  # Important that oozie_server_hostnames is sorted by name so that this only runs on a single Oozie server.
  if not (len(params.oozie_server_hostnames) > 0 and params.hostname == params.oozie_server_hostnames[0]):
    Logger.debug("Will not attempt to copy Atlas Hive hook to DFS since this is not the first Oozie Server "

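The net effect in oozie.py: LZO handling moves out of oozie_server_specific() into the shared oozie() path, package installation is delegated to install_lzo_if_needed(), and the jar copy loses its not_if guard. What the copy does, expressed in plain Python (the directories are hypothetical stand-ins for hadoop_lib_home and oozie_lib_dir):

    import glob
    import os
    import shutil

    # Equivalent of: {sudo} cp {hadoop_lib_home}/hadoop-lzo*.jar {oozie_lib_dir}
    hadoop_lib_home = '/usr/hdp/current/hadoop-client/lib'
    oozie_lib_dir = '/usr/hdp/current/oozie-server/lib'
    for jar in glob.glob(os.path.join(hadoop_lib_home, 'hadoop-lzo*.jar')):
        shutil.copy(jar, oozie_lib_dir)
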
http://git-wip-us.apache.org/repos/asf/ambari/blob/e83bf1bd/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index a0f0672..125ecfe 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -30,6 +30,7 @@ from resource_management.libraries.functions import get_port_from_url
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
 from resource_management.libraries.functions.setup_atlas_hook import has_atlas_in_cluster
 from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.lzo_utils import should_install_lzo
 from resource_management.libraries.functions.expect import expect
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions.get_architecture import get_architecture
@@ -384,6 +385,4 @@ HdfsResource = functools.partial(
 
 is_webhdfs_enabled = config['configurations']['hdfs-site']['dfs.webhdfs.enabled']
 
-# The logic for LZO also exists in HDFS' params.py
-io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
-lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()
+lzo_enabled = should_install_lzo()
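
The per-service copy of the codec check (visible in the removed lines, and previously duplicated in HDFS' params.py) is replaced by the shared lzo_utils helper, so every service answers the question the same way. The deleted logic amounted to this sketch (the real should_install_lzo may also consult other settings):

    # Sketch of the check the removed lines performed inline.
    def lzo_requested(io_compression_codecs):
        return (io_compression_codecs is not None
                and 'com.hadoop.compression.lzo' in io_compression_codecs.lower())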
