http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka.py
deleted file mode 100644
index 0355bbc..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka.py
+++ /dev/null
@@ -1,244 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import collections
-import os
-
-from resource_management import *
-from resource_management.libraries.resources.properties_file import PropertiesFile
-from resource_management.libraries.resources.template_config import TemplateConfig
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management.core.resources.system import Directory, Execute, File, Link
-from resource_management.core.source import StaticFile, Template, InlineTemplate
-from resource_management.libraries.functions import format
-from resource_management.core.logger import Logger
-import sys, os
-from copy import deepcopy
-
-def kafka(upgrade_type=None):
-    import params
-
-    ensure_base_directories()
-    kafka_server_config = mutable_config_dict(params.config['configurations']['kafka-broker'])
-    # This still has an issue of hostnames being alphabetically out-of-order for broker.id in IOP-4.1.
-    # Starting in IOP 4.2, Kafka handles the generation of broker.id so Ambari doesn't have to.
-
-    effective_version = params.iop_stack_version if upgrade_type is None else format_hdp_stack_version(params.version)
-    Logger.info(format("Effective stack version: {effective_version}"))
-
-    if effective_version is not None and effective_version != "" and compare_versions(effective_version, '4.2.0.0') < 0:
-      brokerid = str(sorted(params.kafka_hosts).index(params.hostname))
-      kafka_server_config['broker.id'] = brokerid
-      Logger.info(format("Calculating broker.id as {brokerid}"))
-
-    # listeners and advertised.listeners are only added in 4.2.0.0 onwards.
-    if effective_version is not None and effective_version != "" and compare_versions(effective_version, '4.2.0.0') >= 0:
-      listeners = kafka_server_config['listeners'].replace("localhost", params.hostname)
-      Logger.info(format("Kafka listeners: {listeners}"))
-
-      if params.security_enabled and params.kafka_kerberos_enabled:
-        Logger.info("Kafka kerberos security is enabled.")
-        if "SASL" not in listeners:
-          listeners = listeners.replace("PLAINTEXT", "SASL_PLAINTEXT")
-
-        kafka_server_config['listeners'] = listeners
-        kafka_server_config['advertised.listeners'] = listeners
-        Logger.info(format("Kafka advertised listeners: {listeners}"))
-      else:
-        kafka_server_config['listeners'] = listeners
-
-        if 'authorizer.class.name' in kafka_server_config:
-          del kafka_server_config['authorizer.class.name']
-        if 'principal.to.local.class' in kafka_server_config:
-          del kafka_server_config['principal.to.local.class']
-        if 'super.users' in kafka_server_config:
-          del kafka_server_config['super.users']
-
-        if 'advertised.listeners' in kafka_server_config:
-          advertised_listeners = kafka_server_config['advertised.listeners'].replace("localhost", params.hostname)
-          kafka_server_config['advertised.listeners'] = advertised_listeners
-          Logger.info(format("Kafka advertised listeners: {advertised_listeners}"))
-    else:
-      kafka_server_config['host.name'] = params.hostname
-
-
-    if(params.has_metric_collector):
-      kafka_server_config['kafka.timeline.metrics.host'] = params.metric_collector_host
-      kafka_server_config['kafka.timeline.metrics.port'] = params.metric_collector_port
-
-    kafka_data_dir = kafka_server_config['log.dirs']
-    kafka_data_dirs = filter(None, kafka_data_dir.split(","))
-    Directory(kafka_data_dirs[:],  # Todo: remove list copy when AMBARI-14373 is fixed
-              mode=0755,
-              cd_access='a',
-              owner=params.kafka_user,
-              group=params.user_group,
-              recursive=True)
-    set_dir_ownership(kafka_data_dirs)
-
-    PropertiesFile("server.properties",
-                      dir=params.conf_dir,
-                      properties=kafka_server_config,
-                      owner=params.kafka_user,
-                      group=params.user_group,
-    )
-
-    File(format("{conf_dir}/kafka-env.sh"),
-          owner=params.kafka_user,
-          content=InlineTemplate(params.kafka_env_sh_template)
-     )
-
-    if (params.log4j_props != None):
-        File(format("{conf_dir}/log4j.properties"),
-             mode=0644,
-             group=params.user_group,
-             owner=params.kafka_user,
-             content=params.log4j_props
-         )
-
-    if params.security_enabled and params.kafka_kerberos_enabled:
-        TemplateConfig(format("{conf_dir}/kafka_jaas.conf"),
-                         owner=params.kafka_user)
-
-        TemplateConfig(format("{conf_dir}/kafka_client_jaas.conf"),
-                       owner=params.kafka_user)
-
-    # On some OSes this folder may not exist, so create it before pushing files there
-    Directory(params.limits_conf_dir,
-              recursive=True,
-              owner='root',
-              group='root'
-    )
-
-    File(os.path.join(params.limits_conf_dir, 'kafka.conf'),
-         owner='root',
-         group='root',
-         mode=0644,
-         content=Template("kafka.conf.j2")
-    )
-
-    setup_symlink(params.kafka_managed_pid_dir, params.kafka_pid_dir)
-    setup_symlink(params.kafka_managed_log_dir, params.kafka_log_dir)
-
-
-def mutable_config_dict(kafka_broker_config):
-    kafka_server_config = {}
-    for key, value in kafka_broker_config.iteritems():
-        kafka_server_config[key] = value
-    return kafka_server_config
-
-# Used to work around the hardcoded pid/log dir used by the kafka bash process launcher
-def setup_symlink(kafka_managed_dir, kafka_ambari_managed_dir):
-  import params
-  backup_folder_path = None
-  backup_folder_suffix = "_tmp"
-  if kafka_ambari_managed_dir != kafka_managed_dir:
-    if os.path.exists(kafka_managed_dir) and not os.path.islink(kafka_managed_dir):
-
-      # Backup existing data before delete if config is changed repeatedly to/from default location at any point in time, as there may be relevant contents (historic logs)
-      backup_folder_path = backup_dir_contents(kafka_managed_dir, backup_folder_suffix)
-
-      Directory(kafka_managed_dir,
-                action="delete",
-                recursive=True)
-
-    elif os.path.islink(kafka_managed_dir) and os.path.realpath(kafka_managed_dir) != kafka_ambari_managed_dir:
-      Link(kafka_managed_dir,
-           action="delete")
-
-    if not os.path.islink(kafka_managed_dir):
-      Link(kafka_managed_dir,
-           to=kafka_ambari_managed_dir)
-
-  elif os.path.islink(kafka_managed_dir): # If config is changed and coincides with the kafka managed dir, remove the symlink and physically create the folder
-    Link(kafka_managed_dir,
-         action="delete")
-
-    Directory(kafka_managed_dir,
-              mode=0755,
-              cd_access='a',
-              owner=params.kafka_user,
-              group=params.user_group,
-              recursive=True)
-    set_dir_ownership(kafka_managed_dir)
-
-  if backup_folder_path:
-    # Restore backed up files to current relevant dirs if needed - will be triggered only when changing to/from default path;
-    for file in os.listdir(backup_folder_path):
-      File(os.path.join(kafka_managed_dir,file),
-           owner=params.kafka_user,
-           content = StaticFile(os.path.join(backup_folder_path,file)))
-
-    # Clean up backed up folder
-    Directory(backup_folder_path,
-              action="delete",
-              recursive=True)
-
-
-# Uses agent temp dir to store backup files
-def backup_dir_contents(dir_path, backup_folder_suffix):
-  import params
-  backup_destination_path = params.tmp_dir + os.path.normpath(dir_path)+backup_folder_suffix
-  Directory(backup_destination_path,
-            mode=0755,
-            cd_access='a',
-            owner=params.kafka_user,
-            group=params.user_group,
-            recursive=True
-  )
-  set_dir_ownership(backup_destination_path)
-  # Safely copy top-level contents to backup folder
-  for file in os.listdir(dir_path):
-    File(os.path.join(backup_destination_path, file),
-         owner=params.kafka_user,
-         content = StaticFile(os.path.join(dir_path,file)))
-
-  return backup_destination_path
-
-
-def ensure_base_directories():
-  """
-  Make basic Kafka directories, and make sure that their ownership is correct
-  """
-  import params
-  base_dirs = [params.kafka_log_dir, params.kafka_pid_dir, params.conf_dir]
-  Directory(base_dirs[:],  # Todo: remove list copy when AMBARI-14373 is fixed
-            mode=0755,
-            cd_access='a',
-            owner=params.kafka_user,
-            group=params.user_group,
-            recursive=True
-            )
-  set_dir_ownership(base_dirs)
-
-def set_dir_ownership(targets):
-  import params
-  if isinstance(targets, collections.Iterable):
-    directories = targets
-  else:  # If target is a single object, convert it to list
-    directories = [targets]
-  for directory in directories:
-    # If path is empty or a single slash,
-    # may corrupt filesystem permissions
-    if len(directory) > 1:
-      Execute(('chown', '-R', format("{kafka_user}:{user_group}"), directory),
-            sudo=True)
-    else:
-      Logger.warning("Permissions for the folder \"%s\" were not updated due 
to "
-            "empty path passed: " % directory)

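For context on the pre-4.2 broker.id logic deleted above: each broker derived
its id from its host's position in the alphabetically sorted broker list,
which is why out-of-order hostnames were a known wrinkle in IOP 4.1. A minimal
standalone sketch of that scheme (the hostnames below are illustrative):

    # Stand-ins for params.kafka_hosts and params.hostname; values are made up.
    kafka_hosts = ["broker2.example.com", "broker1.example.com", "broker3.example.com"]
    hostname = "broker2.example.com"

    # Same derivation as the deleted code: index into the sorted broker list.
    brokerid = str(sorted(kafka_hosts).index(hostname))
    print(brokerid)  # "1" -- broker1 maps to 0, broker2 to 1, broker3 to 2
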
http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
deleted file mode 100644
index ff2d750..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_broker.py
+++ /dev/null
@@ -1,107 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import iop_select
-from resource_management.libraries.functions import Direction
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
-from resource_management import Script
-from resource_management.core.logger import Logger
-from resource_management.core.resources.system import Execute, File, Directory
-from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions.check_process_status import check_process_status
-from kafka import ensure_base_directories
-import time
-
-import upgrade
-from kafka import kafka
-
-class KafkaBroker(Script):
-
-  def get_stack_to_component(self):
-    return {"BigInsights": "kafka-broker"}
-
-  def install(self, env):
-    self.install_packages(env)
-
-  def configure(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-    kafka(upgrade_type=upgrade_type)
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '4.1.0.0') >= 0:
-      iop_select.select("kafka-broker", params.version)
-
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '4.1.0.0') >= 0:
-      conf_select.select(params.stack_name, "kafka", params.version)
-
-    # This is extremely important since it should only be called if crossing the IOP 4.2 boundary.
-    if params.current_version and params.version and params.upgrade_direction:
-      src_version = dst_version = None
-      if params.upgrade_direction == Direction.UPGRADE:
-        src_version = format_hdp_stack_version(params.current_version)
-        dst_version = format_hdp_stack_version(params.version)
-      else:
-        # These represent the original values during the UPGRADE direction
-        src_version = format_hdp_stack_version(params.version)
-        dst_version = format_hdp_stack_version(params.downgrade_from_version)
-
-      if compare_versions(src_version, '4.2.0.0') < 0 and compare_versions(dst_version, '4.2.0.0') >= 0:
-        # Upgrading from IOP 4.1 to 4.2; calling the acl migration script requires the configs to be present.
-        self.configure(env, upgrade_type=upgrade_type)
-        upgrade.run_migration(env, upgrade_type)
-
-  def start(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-    self.configure(env, upgrade_type=upgrade_type)
-    daemon_cmd = format('source {params.conf_dir}/kafka-env.sh ; {params.kafka_bin} start')
-    no_op_test = format('ls {params.kafka_pid_file} >/dev/null 2>&1 && ps -p `cat {params.kafka_pid_file}` >/dev/null 2>&1')
-    Execute(daemon_cmd,
-            user=params.kafka_user,
-            not_if=no_op_test
-    )
-
-  def stop(self, env, upgrade_type=None):
-    import params
-    env.set_params(params)
-    ensure_base_directories()
-    daemon_cmd = format('source {params.conf_dir}/kafka-env.sh; {params.kafka_bin} stop')
-    Execute(daemon_cmd,
-            user=params.kafka_user,
-    )
-    File (params.kafka_pid_file, 
-          action = "delete"
-    )
-    # wait for ZooKeeper to remove the kafka nodes
-    print 'Sleeping for ', params.zk_session_timeout, 'ms'
-    time.sleep(params.zk_session_timeout/1000)
-
-
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    check_process_status(status_params.kafka_pid_file)
-
-if __name__ == "__main__":
-  KafkaBroker().execute()
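
A note on the start() guard above: no_op_test is a shell predicate that
succeeds only when the pid file exists and the process it names is still
alive, so Execute skips the start command for an already-running broker. An
equivalent standalone check in Python (the pid file path is illustrative):

    import os

    def broker_is_running(pid_file="/var/run/kafka/kafka.pid"):
        """Mirror of the shell guard: the pid file exists and the pid is alive."""
        try:
            with open(pid_file) as f:
                pid = int(f.read().strip())
            os.kill(pid, 0)  # signal 0 probes the process without affecting it
            return True
        except (IOError, ValueError, OSError):
            return False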

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_upgrade.py
deleted file mode 100755
index da6886c..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/kafka_upgrade.py
+++ /dev/null
@@ -1,41 +0,0 @@
-
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import os
-from resource_management import *
-from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions import Direction
-
-class KafkaUpgrade(Script):
-  def copy_config(self, env):
-    """
-    Copy Kafka Config files from /usr/iop/4.2.0.0/etc/kafka/conf.dist/ to /usr/iop/4.2.0.0/kafka/conf.
-    cp -r /usr/iop/4.2.0.0/etc/kafka/conf.dist/* /usr/iop/4.2.0.0/kafka/conf.
-    """
-    import params
-    if params.upgrade_direction is not None and params.upgrade_direction == Direction.UPGRADE:
-      kafka_src_config_dir="/usr/iop/4.2.0.0/etc/kafka/conf.dist"
-      kafka_dest_config_dir="/usr/iop/4.2.0.0/kafka/conf"
-      if os.path.isdir(kafka_src_config_dir) and os.path.islink(kafka_dest_config_dir):
-        Execute(('cp', '-r', kafka_src_config_dir + "/.", kafka_dest_config_dir), sudo=True, logoutput=True)
-
-if __name__ == "__main__":
-  KafkaUpgrade().execute()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/params.py
deleted file mode 100644
index 2495789..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/params.py
+++ /dev/null
@@ -1,157 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-from resource_management.libraries.functions import format
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management.libraries.functions.default import default
-from utils import get_bare_principal
-from resource_management.libraries.functions.get_hdp_version import get_hdp_version
-from resource_management.libraries.functions.is_empty import is_empty
-import status_params
-from resource_management.core.logger import Logger
-from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.functions import iop_select
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import get_kinit_path
-
-
-# server configurations
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-stack_name = default("/hostLevelParams/stack_name", None)
-retryAble = default("/commandParams/command_retry_enabled", False)
-
-# Version being upgraded/downgraded to
-version = default("/commandParams/version", None)
-# Version that is CURRENT.
-current_version = default("/hostLevelParams/current_version", None)
-
-host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
-
-stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-iop_stack_version = format_hdp_stack_version(stack_version_unformatted)
-upgrade_direction = default("/commandParams/upgrade_direction", None)
-
-# When downgrading the 'version' and 'current_version' are both pointing to the downgrade-target version
-# downgrade_from_version provides the source-version the downgrade is happening from
-downgrade_from_version = default("/commandParams/downgrade_from_version", None)
-
-hostname = config['hostname']
-
-# default kafka parameters
-kafka_home = '/usr/iop/current/kafka-broker'
-kafka_bin = kafka_home+'/bin/kafka'
-conf_dir = "/usr/iop/current/kafka-broker/config"
-limits_conf_dir = "/etc/security/limits.d"
-
-# Used while upgrading the stack in a kerberized cluster and running kafka-acls.sh
-zookeeper_connect = default("/configurations/kafka-broker/zookeeper.connect", None)
-
-kafka_user_nofile_limit = default("/configurations/kafka-env/kafka_user_nofile_limit", None)
-kafka_user_nproc_limit = default("/configurations/kafka-env/kafka_user_nproc_limit", None)
-
-kafka_user = config['configurations']['kafka-env']['kafka_user']
-kafka_log_dir = config['configurations']['kafka-env']['kafka_log_dir']
-kafka_pid_dir = status_params.kafka_pid_dir
-kafka_pid_file = kafka_pid_dir+"/kafka.pid"
-# This is hardcoded in the kafka bash process lifecycle, over which we have no control
-kafka_managed_pid_dir = "/var/run/kafka"
-kafka_managed_log_dir = "/var/log/kafka"
-user_group = config['configurations']['cluster-env']['user_group']
-java64_home = config['hostLevelParams']['java_home']
-kafka_env_sh_template = config['configurations']['kafka-env']['content']
-kafka_hosts = config['clusterHostInfo']['kafka_broker_hosts']
-kafka_hosts.sort()
-zk_session_timeout = config['configurations']['kafka-broker']['zookeeper.session.timeout.ms']
-
-zookeeper_hosts = config['clusterHostInfo']['zookeeper_hosts']
-zookeeper_hosts.sort()
-
-if (('kafka-log4j' in config['configurations']) and ('content' in config['configurations']['kafka-log4j'])):
-    log4j_props = config['configurations']['kafka-log4j']['content']
-else:
-    log4j_props = None
-
-metric_collector_host = ""
-metric_collector_port = ""
-
-ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", [])
-has_metric_collector = not len(ams_collector_hosts) == 0
-
-if has_metric_collector:
-  if 'cluster-env' in config['configurations'] and \
-      'metrics_collector_vip_host' in config['configurations']['cluster-env']:
-    metric_collector_host = config['configurations']['cluster-env']['metrics_collector_vip_host']
-  else:
-    metric_collector_host = ams_collector_hosts[0]
-  if 'cluster-env' in config['configurations'] and \
-      'metrics_collector_vip_port' in config['configurations']['cluster-env']:
-    metric_collector_port = config['configurations']['cluster-env']['metrics_collector_vip_port']
-  else:
-    metric_collector_web_address = default("/configurations/ams-site/timeline.metrics.service.webapp.address", "0.0.0.0:6188")
-    if metric_collector_web_address.find(':') != -1:
-      metric_collector_port = metric_collector_web_address.split(':')[1]
-    else:
-      metric_collector_port = '6188'
-  pass
-
-# Security-related params
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-kafka_kerberos_enabled = ('security.inter.broker.protocol' in config['configurations']['kafka-broker'] and
-                          config['configurations']['kafka-broker']['security.inter.broker.protocol'] == "SASL_PLAINTEXT")
-
-if security_enabled and iop_stack_version != "" and 'kafka_principal_name' in config['configurations']['kafka-env'] and compare_versions(iop_stack_version, '4.1') >= 0:
-    _hostname_lowercase = config['hostname'].lower()
-    _kafka_principal_name = config['configurations']['kafka-env']['kafka_principal_name']
-    kafka_jaas_principal = _kafka_principal_name.replace('_HOST', _hostname_lowercase)
-    kafka_keytab_path = config['configurations']['kafka-env']['kafka_keytab']
-    kafka_bare_jaas_principal = get_bare_principal(_kafka_principal_name)
-    kafka_kerberos_params = "-Djava.security.auth.login.config=" + conf_dir + "/kafka_jaas.conf"
-else:
-    kafka_kerberos_params = ''
-
-namenode_hosts = default("/clusterHostInfo/namenode_host", [])
-has_namenode = not len(namenode_hosts) == 0
-
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user'] if has_namenode else None
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab'] if has_namenode else None
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
-hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
-default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
-hadoop_bin_dir = iop_select.get_hadoop_dir("bin") if has_namenode else None
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
-kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-
-import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir,
-  principal_name = hdfs_principal_name,
-  hdfs_site = hdfs_site,
-  default_fs = default_fs
-)
-
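
The functools.partial pattern at the end of params.py pre-binds the
cluster-wide keyword arguments once so call sites only supply what varies per
call. A self-contained sketch (hdfs_resource and the sample values are
illustrative stand-ins, not the real HdfsResource resource):

    import functools

    # Illustrative stand-in for the real HdfsResource resource.
    def hdfs_resource(path, user=None, keytab=None, action=None):
        print("HdfsResource(%s) user=%s action=%s" % (path, user, action))

    # Bind the common arguments once...
    HdfsResource = functools.partial(
        hdfs_resource,
        user="hdfs",
        keytab="/etc/security/keytabs/hdfs.headless.keytab")

    # ...so each call only passes what differs.
    HdfsResource("/apps/kafka", action="create_on_execute")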

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/service_check.py
deleted file mode 100644
index a42d894..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/service_check.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions.validate import call_and_match_output
-from resource_management.libraries.functions.format import format
-from resource_management.core.logger import Logger
-from resource_management.core import sudo
-import subprocess
-
-class ServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-
-    # TODO: the Kafka service check should be more robust; it should get all the broker_hosts,
-    # produce some messages, and check that the consumer reads the same number of messages.
-
-    kafka_config = self.read_kafka_config()
-    topic = "ambari_kafka_service_check"
-    create_topic_cmd_created_output = "Created topic \"ambari_kafka_service_check\"."
-    create_topic_cmd_exists_output = "Topic \"ambari_kafka_service_check\" already exists."
-    source_cmd = format("source {conf_dir}/kafka-env.sh")
-    topic_exists_cmd = format("{kafka_home}/bin/kafka-topics.sh --zookeeper {kafka_config[zookeeper.connect]} --topic {topic} --list")
-    topic_exists_cmd_p = subprocess.Popen(topic_exists_cmd.split(" "), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    topic_exists_cmd_out, topic_exists_cmd_err = topic_exists_cmd_p.communicate()
-    # run the create-topic command only if the topic doesn't exist
-    if topic not in topic_exists_cmd_out:
-      create_topic_cmd = format("{kafka_home}/bin/kafka-topics.sh --zookeeper {kafka_config[zookeeper.connect]} --create --topic {topic} --partitions 1 --replication-factor 1")
-      command = source_cmd + " ; " + create_topic_cmd
-      Logger.info("Running kafka create topic command: %s" % command)
-      call_and_match_output(command, format("({create_topic_cmd_created_output})|({create_topic_cmd_exists_output})"), "Failed to check that topic exists", user=params.kafka_user)
-
-  def read_kafka_config(self):
-    import params
-
-    kafka_config = {}
-    content = sudo.read_file(params.conf_dir + "/server.properties")
-    for line in content.splitlines():
-      if line.startswith("#") or not line.strip():
-        continue
-
-      key, value = line.split("=")
-      kafka_config[key] = value.replace("\n", "")
-
-    return kafka_config
-
-if __name__ == "__main__":
-    ServiceCheck().execute()
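
One fragility worth noting in read_kafka_config() above: line.split("=")
raises ValueError for any property whose value itself contains an "=" (for
example a JVM options string). Splitting only on the first "=" avoids that; a
minimal sketch under that assumption, reading a local file directly instead of
through sudo:

    def read_kafka_config(path="/usr/iop/current/kafka-broker/config/server.properties"):
        kafka_config = {}
        with open(path) as f:
            for line in f:
                line = line.strip()
                # skip blank lines and comments, as the original does
                if not line or line.startswith("#"):
                    continue
                # split on the first '=' only, so values may contain '='
                key, _, value = line.partition("=")
                kafka_config[key.strip()] = value.strip()
        return kafka_config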

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/status_params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/status_params.py
deleted file mode 100644
index 57bdf5e..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/status_params.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-from resource_management.libraries.functions import format
-from resource_management.libraries.script.script import Script
-
-config = Script.get_config()
-
-kafka_pid_dir = config['configurations']['kafka-env']['kafka_pid_dir']
-kafka_pid_file = format("{kafka_pid_dir}/kafka.pid")

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/upgrade.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/upgrade.py
deleted file mode 100644
index f0d278b..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/upgrade.py
+++ /dev/null
@@ -1,78 +0,0 @@
-
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import os
-
-from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions import format
-from resource_management.libraries.functions import Direction
-from resource_management.core.exceptions import Fail
-from resource_management.core.logger import Logger
-
-def run_migration(env, upgrade_type):
-  """
-  If the acl migration script is present, then run it for either upgrade or downgrade.
-  That script was introduced in IOP 4.1.0.0 and requires stopping all Kafka brokers first.
-  Requires configs to be present.
-  :param env: Environment.
-  :param upgrade_type: "rolling" or "nonrolling"
-  """
-  import params
-
-  if upgrade_type is None:
-    raise Fail('Parameter "upgrade_type" is missing.')
-
-  if params.upgrade_direction is None:
-    raise Fail('Parameter "upgrade_direction" is missing.')
-
-  if params.upgrade_direction == Direction.DOWNGRADE and params.downgrade_from_version is None:
-    raise Fail('Parameter "downgrade_from_version" is missing.')
-
-  if not params.security_enabled:
-    Logger.info("Skip running the Kafka ACL migration script since cluster 
security is not enabled.")
-    return
-  
-  Logger.info("Upgrade type: {0}, direction: {1}".format(str(upgrade_type), 
params.upgrade_direction))
-
-  # If the schema upgrade script exists in the version upgrading to, then attempt to upgrade/downgrade it while still using the present bits.
-  kafka_acls_script = None
-  command_suffix = ""
-  if params.upgrade_direction == Direction.UPGRADE:
-    kafka_acls_script = format("/usr/iop/{version}/kafka/bin/kafka-acls.sh")
-    command_suffix = "--upgradeAcls"
-  # elif params.upgrade_direction == Direction.DOWNGRADE:
-  #   kafka_acls_script = format("/usr/iop/{downgrade_from_version}/kafka/bin/kafka-acls.sh")
-  #   command_suffix = "--downgradeAcls"
-
-  if kafka_acls_script is not None:
-    if os.path.exists(kafka_acls_script):
-      Logger.info("Found Kafka acls script: {0}".format(kafka_acls_script))
-      if params.zookeeper_connect is None:
-        raise Fail("Could not retrieve property 
kafka-broker/zookeeper.connect")
-
-      acls_command = "{0} --authorizer kafka.security.auth.SimpleAclAuthorizer 
--authorizer-properties zookeeper.connect={1} {2}".\
-        format(kafka_acls_script, params.zookeeper_connect, command_suffix)
-
-      Execute(acls_command,
-              user=params.kafka_user,
-              logoutput=True)
-    else:
-      Logger.info("Did not find Kafka acls script: 
{0}".format(kafka_acls_script))
-
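
For context, here is the shape of the command the Execute call above ends up
running during an upgrade; the ZooKeeper quorum below is made up:

    # Illustrative values only.
    kafka_acls_script = "/usr/iop/4.2.0.0/kafka/bin/kafka-acls.sh"
    zookeeper_connect = "zk1.example.com:2181,zk2.example.com:2181"
    command_suffix = "--upgradeAcls"

    acls_command = "{0} --authorizer kafka.security.auth.SimpleAclAuthorizer " \
                   "--authorizer-properties zookeeper.connect={1} {2}".format(
                       kafka_acls_script, zookeeper_connect, command_suffix)
    # -> /usr/iop/4.2.0.0/kafka/bin/kafka-acls.sh --authorizer
    #    kafka.security.auth.SimpleAclAuthorizer --authorizer-properties
    #    zookeeper.connect=zk1.example.com:2181,zk2.example.com:2181 --upgradeAcls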

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/utils.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/utils.py
deleted file mode 100644
index 2f1fa5e..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/scripts/utils.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import re
-
-def get_bare_principal(normalized_principal_name):
-    """
-    Given a normalized principal name (nimbus/c6501.ambari.apache....@example.com) returns just the
-    primary component (nimbus)
-    :param normalized_principal_name: a string containing the principal name to process
-    :return: a string containing the primary component value or None if not valid
-    """
-
-    bare_principal = None
-
-    if normalized_principal_name:
-        match = re.match(r"([^/@]+)(?:/[^@])?(?:@.*)?", normalized_principal_name)
-
-    if match:
-        bare_principal = match.group(1)
-
-    return bare_principal
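
A quick check of what get_bare_principal() returns (the sample principals are
made up). Note that as written the function leaves match unassigned when the
input is empty, so the later "if match:" would raise a NameError; the sketch
below nests the check so an empty input returns None instead:

    import re

    def get_bare_principal(normalized_principal_name):
        bare_principal = None
        if normalized_principal_name:
            match = re.match(r"([^/@]+)(?:/[^@])?(?:@.*)?", normalized_principal_name)
            if match:  # nested so empty input falls through to None
                bare_principal = match.group(1)
        return bare_principal

    print(get_bare_principal("nimbus/c6501.ambari.apache.org@EXAMPLE.COM"))  # nimbus
    print(get_bare_principal("kafka@EXAMPLE.COM"))                           # kafka
    print(get_bare_principal(""))                                            # None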

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/templates/kafka.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/templates/kafka.conf.j2 b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/templates/kafka.conf.j2
deleted file mode 100644
index 3f15d11..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/templates/kafka.conf.j2
+++ /dev/null
@@ -1,35 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-{{kafka_user}}   - nofile   {{kafka_user_nofile_limit}}
-{{kafka_user}}   - nproc    {{kafka_user_nproc_limit}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/templates/kafka_client_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/templates/kafka_client_jaas.conf.j2 b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/templates/kafka_client_jaas.conf.j2
deleted file mode 100644
index 7f81d85..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/templates/kafka_client_jaas.conf.j2
+++ /dev/null
@@ -1,29 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-KafkaClient {
-   com.sun.security.auth.module.Krb5LoginModule required
-   useTicketCache=true
-   renewTicket=true
-   serviceName="{{kafka_bare_jaas_principal}}";
-};
-Client {
-   com.sun.security.auth.module.Krb5LoginModule required
-   useTicketCache=true
-   renewTicket=true
-   serviceName="zookeeper";
-};

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/templates/kafka_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/templates/kafka_jaas.conf.j2 b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/templates/kafka_jaas.conf.j2
deleted file mode 100644
index 56c558d..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KAFKA/package/templates/kafka_jaas.conf.j2
+++ /dev/null
@@ -1,41 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-KafkaServer {
-   com.sun.security.auth.module.Krb5LoginModule required
-   useKeyTab=true
-   keyTab="{{kafka_keytab_path}}"
-   storeKey=true
-   useTicketCache=false
-   serviceName="{{kafka_bare_jaas_principal}}"
-   principal="{{kafka_jaas_principal}}";
-};
-KafkaClient {
-   com.sun.security.auth.module.Krb5LoginModule required
-   useTicketCache=true
-   renewTicket=true
-   serviceName="{{kafka_bare_jaas_principal}}";
-};
-Client {
-   com.sun.security.auth.module.Krb5LoginModule required
-   useKeyTab=true
-   keyTab="{{kafka_keytab_path}}"
-   storeKey=true
-   useTicketCache=false
-   serviceName="zookeeper"
-   principal="{{kafka_jaas_principal}}";
-};

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/configuration/kerberos-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/configuration/kerberos-env.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/configuration/kerberos-env.xml
deleted file mode 100755
index 979eee4..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/configuration/kerberos-env.xml
+++ /dev/null
@@ -1,306 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration supports_final="false">
-  <property require-input="true">
-    <name>kdc_type</name>
-    <display-name>KDC type</display-name>
-    <value>mit-kdc</value>
-    <description>
-      The type of KDC being used. Either mit-kdc or active-directory
-    </description>
-    <value>mit-kdc</value>
-    <display-name>KDC type</display-name>
-    <value-attributes>
-      <type>componentHost</type>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>manage_identities</name>
-    <description>
-      Indicates whether the Ambari user and service Kerberos identities (principals and keytab files)
-      should be managed (created, deleted, updated, etc...) by Ambari or managed manually.
-    </description>
-    <value>true</value>
-    <display-name>Manage Kerberos Identities</display-name>
-    <value-attributes>
-      <visible>false</visible>
-      <overridable>false</overridable>
-      <type>boolean</type>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>install_packages</name>
-    <display-name>Install OS-specific Kerberos client package(s)</display-name>
-    <description>
-      Indicates whether Ambari should install the Kerberos client package(s) or not. If not, it is
-      expected that Kerberos utility programs (such as kadmin, kinit, klist, and kdestroy) are
-      compatible with MIT Kerberos 5 version 1.10.3 in command line options and behaviors.
-    </description>
-    <value>true</value>
-    <value-attributes>
-      <type>boolean</type>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property require-input="true">
-    <name>ldap_url</name>
-    <display-name>LDAP url</display-name>
-    <description>
-      The URL to the Active Directory LDAP Interface
-    </description>
-    <value/>
-    <value-attributes>
-      <type>host</type>
-      <visible>false</visible>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property require-input="true">
-    <name>container_dn</name>
-    <display-name>Container DN</display-name>
-    <description>
-      The distinguished name (DN) of the container used to store service principals
-    </description>
-    <value-attributes>
-      <visible>false</visible>
-      <overridable>false</overridable>
-    </value-attributes>
-    <value/>
-  </property>
-
-  <property require-input="true">
-    <name>encryption_types</name>
-    <display-name>Encryption Types</display-name>
-    <description>
-      The supported list of session key encryption types that should be returned by the KDC.
-    </description>
-    <value>aes des3-cbc-sha1 rc4 des-cbc-md5</value>
-    <value-attributes>
-      <type>multiLine</type>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property require-input="true">
-    <name>realm</name>
-    <description>
-      The default realm to use when creating service principals
-    </description>
-    <display-name>Realm name</display-name>
-    <value/>
-    <value-attributes>
-      <type>host</type>
-      <editable-only-at-install>true</editable-only-at-install>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property require-input="true">
-    <name>kdc_host</name>
-    <description>
-      The IP address or FQDN for the KDC host. Optionally a port number may be included.
-    </description>
-    <display-name>KDC host</display-name>
-    <value/>
-    <value-attributes>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>admin_server_host</name>
-    <display-name>Kadmin host</display-name>
-    <description>
-      The IP address or FQDN for the KDC Kerberos administrative host. Optionally a port number may be included.
-    </description>
-    <value/>
-    <value-attributes>
-      <type>host</type>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>executable_search_paths</name>
-    <display-name>Executable Search Paths</display-name>
-    <description>
-      A comma-delimited list of search paths to use to find Kerberos utilities like kadmin and kinit.
-    </description>
-    <value>/usr/bin, /usr/kerberos/bin, /usr/sbin, /usr/lib/mit/bin, /usr/lib/mit/sbin</value>
-    <value-attributes>
-      <overridable>false</overridable>
-      <type>multiLine</type>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>password_length</name>
-    <display-name>Password Length</display-name>
-    <description>
-      The required length for generated passwords.
-    </description>
-    <value>20</value>
-    <value-attributes>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>password_min_lowercase_letters</name>
-    <display-name>Password Minimum # Lowercase Letters</display-name>
-    <description>
-      The minimum number of lowercase letters (a-z) required in generated passwords
-    </description>
-    <value>1</value>
-    <value-attributes>
-      <type>int</type>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>password_min_uppercase_letters</name>
-    <display-name>Password Minimum # Uppercase Letters</display-name>
-    <description>
-      The minimum number of uppercase letters (A-Z) required in generated passwords
-    </description>
-    <value>1</value>
-    <value-attributes>
-      <type>int</type>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>password_min_digits</name>
-    <display-name>Password Minimum # Digits</display-name>
-    <description>
-      The minimum number of digits (0-9) required in generated passwords
-    </description>
-    <value>1</value>
-    <value-attributes>
-      <type>int</type>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>password_min_punctuation</name>
-    <display-name>Password Minimum # Punctuation Characters</display-name>
-    <description>
-      The minimum number of punctuation characters (?.!$%^*()-_+=~) required in generated passwords
-    </description>
-    <value>1</value>
-    <value-attributes>
-      <type>int</type>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>password_min_whitespace</name>
-    <display-name>Password Minimum # Whitespace Characters</display-name>
-    <description>
-      The minimum number of whitespace characters required in generated passwords
-    </description>
-    <value>0</value>
-    <value-attributes>
-      <type>int</type>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>service_check_principal_name</name>
-    <display-name>Test Kerberos Principal</display-name>
-    <description>
-      The principal name to use when executing the Kerberos service check
-    </description>
-    <value>${cluster_name}-${short_date}</value>
-    <value-attributes>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>case_insensitive_username_rules</name>
-    <display-name>Enable case insensitive username rules</display-name>
-    <description>
-      Force principal names to resolve to lowercase local usernames in auth-to-local rules
-    </description>
-    <value>false</value>
-    <value-attributes>
-      <overridable>false</overridable>
-      <type>boolean</type>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>ad_create_attributes_template</name>
-    <display-name>Account Attribute Template</display-name>
-    <description>
-      A Velocity template to use to generate a JSON-formatted document containing the set of
-      attribute names and values needed to create a new Kerberos identity in the relevant
-      Active Directory.
-      Variables include:
-      principal_name, principal_primary, principal_instance, realm, realm_lowercase,
-      normalized_principal, principal digest, password, is_service, container_dn
-    </description>
-    <value>
-{
-  "objectClass": ["top", "person", "organizationalPerson", "user"],
-  "cn": "$principal_name",
-  #if( $is_service )
-  "servicePrincipalName": "$principal_name",
-  #end
-  "userPrincipalName": "$normalized_principal",
-  "unicodePwd": "$password",
-  "accountExpires": "0",
-  "userAccountControl": "66048"
-}
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <empty-value-valid>true</empty-value-valid>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>kdc_create_attributes</name>
-    <display-name>Principal Attributes</display-name>
-    <description>
-      The set of attributes to use when creating a new Kerberos identity in the relevant (MIT) KDC.
-    </description>
-    <value/>
-    <value-attributes>
-      <empty-value-valid>true</empty-value-valid>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/configuration/krb5-conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/configuration/krb5-conf.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/configuration/krb5-conf.xml
deleted file mode 100644
index 6780d2e..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/configuration/krb5-conf.xml
+++ /dev/null
@@ -1,100 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <property require-input="false">
-    <name>domains</name>
-    <display-name>Domains</display-name>
-    <description>
-      A comma-separated list of domain names used to map server host names to the Realm name (e.g. .example.com,example.com). This is optional.
-    </description>
-    <value/>
-    <value-attributes>
-      <empty-value-valid>true</empty-value-valid>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>manage_krb5_conf</name>
-    <display-name>Manage Kerberos client krb5.conf</display-name>
-    <description>
-      Indicates whether your krb5.conf file should be managed by the wizard or whether you will manage it yourself
-    </description>
-    <value>true</value>
-    <value-attributes>
-      <overridable>false</overridable>
-      <type>boolean</type>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>conf_dir</name>
-    <display-name>krb5-conf directory path</display-name>
-    <description>The krb5.conf configuration directory</description>
-    <value>/etc</value>
-    <value-attributes>
-      <type>directory</type>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>krb5-conf template</display-name>
-    <description>Customizable krb5.conf template (Jinja template engine)</description>
-    <value>
-[libdefaults]
-  renew_lifetime = 7d
-  forwardable = true
-  default_realm = {{realm}}
-  ticket_lifetime = 24h
-  dns_lookup_realm = false
-  dns_lookup_kdc = false
-  #default_tgs_enctypes = {{encryption_types}}
-  #default_tkt_enctypes = {{encryption_types}}
-
-{% if domains %}
-[domain_realm]
-{% for domain in domains.split(',') %}
-  {{domain}} = {{realm}}
-{% endfor %}
-{% endif %}
-
-[logging]
-  default = FILE:/var/log/krb5kdc.log
-  admin_server = FILE:/var/log/kadmind.log
-  kdc = FILE:/var/log/krb5kdc.log
-
-[realms]
-  {{realm}} = {
-    admin_server = {{admin_server_host|default(kdc_host, True)}}
-    kdc = {{kdc_host}}
-  }
-
-{# Append additional realm declarations below #}
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <overridable>false</overridable>
-    </value-attributes>
-  </property>
-</configuration>
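
Because the template above is ordinary Jinja, it can be rendered standalone to preview the krb5.conf that Ambari would write. A minimal sketch, assuming the jinja2 package is available and using made-up realm/KDC values (the template is trimmed to the interesting parts):

from jinja2 import Template

krb5_template = Template("""
[libdefaults]
  default_realm = {{realm}}
{% if domains %}
[domain_realm]
{% for domain in domains.split(',') %}
  {{domain}} = {{realm}}
{% endfor %}
{% endif %}
[realms]
  {{realm}} = {
    admin_server = {{admin_server_host|default(kdc_host, True)}}
    kdc = {{kdc_host}}
  }
""")

print(krb5_template.render(realm='EXAMPLE.COM',
                           kdc_host='kdc.example.com',
                           admin_server_host=None,  # default() falls back to kdc_host
                           domains='.example.com,example.com'))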

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/kerberos.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/kerberos.json b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/kerberos.json
deleted file mode 100644
index 6ab7610..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/kerberos.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "services": [
-    {
-      "name": "KERBEROS",
-      "identities": [
-        {
-          "name": "/smokeuser"
-        }
-      ],
-      "components": [
-        {
-          "name": "KERBEROS_CLIENT"
-        }
-      ]
-    }
-  ]
-}
\ No newline at end of file
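
The descriptor above is plain JSON, so it is easy to sanity-check outside Ambari. A small sketch (the local file path is hypothetical):

import json

# Hypothetical local copy of the kerberos.json descriptor shown above.
with open('kerberos.json') as f:
    descriptor = json.load(f)

for service in descriptor['services']:
    identities = [i['name'] for i in service.get('identities', [])]
    components = [c['name'] for c in service.get('components', [])]
    print('%s %s %s' % (service['name'], identities, components))
# -> KERBEROS ['/smokeuser'] ['KERBEROS_CLIENT']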

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/metainfo.xml b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/metainfo.xml
deleted file mode 100644
index 4436a46..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/metainfo.xml
+++ /dev/null
@@ -1,147 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>KERBEROS</name>
-      <displayName>Kerberos</displayName>
-      <comment>A computer network authentication protocol which works on
-        the basis of 'tickets' to allow nodes communicating over a
-        non-secure network to prove their identity to one another in a
-        secure manner.
-      </comment>
-      <version>1.10.3</version>
-
-      <components>
-        <component>
-          <name>KERBEROS_CLIENT</name>
-          <displayName>Kerberos Client</displayName>
-          <category>CLIENT</category>
-          <cardinality>ALL</cardinality>
-          <versionAdvertised>false</versionAdvertised>
-          <auto-deploy>
-            <enabled>true</enabled>
-          </auto-deploy>
-          <commandScript>
-            <script>scripts/kerberos_client.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>1200</timeout>
-          </commandScript>
-          <customCommands>
-            <customCommand>
-              <name>SET_KEYTAB</name>
-              <commandScript>
-                <script>scripts/kerberos_client.py</script>
-                <scriptType>PYTHON</scriptType>
-                <timeout>1000</timeout>
-              </commandScript>
-            </customCommand>
-            <customCommand>
-              <name>REMOVE_KEYTAB</name>
-              <commandScript>
-                <script>scripts/kerberos_client.py</script>
-                <scriptType>PYTHON</scriptType>
-                <timeout>1000</timeout>
-              </commandScript>
-            </customCommand>
-          </customCommands>
-          <configFiles>
-            <configFile>
-              <type>env</type>
-              <fileName>krb5.conf</fileName>
-              <dictionaryName>krb5-conf</dictionaryName>
-            </configFile>
-          </configFiles>
-        </component>
-      </components>
-
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>redhat7,redhat6</osFamily>
-          <packages>
-            <package>
-              <name>krb5-server</name>
-              <skipUpgrade>true</skipUpgrade>
-            </package>
-            <package>
-              <name>krb5-libs</name>
-              <skipUpgrade>true</skipUpgrade>
-            </package>
-            <package>
-              <name>krb5-workstation</name>
-              <skipUpgrade>true</skipUpgrade>
-            </package>
-          </packages>
-        </osSpecific>
-
-        <osSpecific>
-          <osFamily>debian7,ubuntu12,ubuntu14</osFamily>
-          <packages>
-            <package>
-              <name>krb5-kdc</name>
-              <skipUpgrade>true</skipUpgrade>
-            </package>
-            <package>
-              <name>krb5-admin-server</name>
-              <skipUpgrade>true</skipUpgrade>
-            </package>
-            <package>
-              <name>krb5-user</name>
-              <skipUpgrade>true</skipUpgrade>
-            </package>
-            <package>
-              <name>krb5-config</name>
-              <skipUpgrade>true</skipUpgrade>
-            </package>
-          </packages>
-        </osSpecific>
-
-        <osSpecific>
-          <osFamily>suse11</osFamily>
-          <packages>
-            <package>
-              <name>krb5</name>
-              <skipUpgrade>true</skipUpgrade>
-            </package>
-            <package>
-              <name>krb5-client</name>
-              <skipUpgrade>true</skipUpgrade>
-            </package>
-            <package>
-              <name>krb5-server</name>
-              <skipUpgrade>true</skipUpgrade>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-
-      <configuration-dependencies>
-        <config-type>krb5-conf</config-type>
-        <config-type>kerberos-env</config-type>
-      </configuration-dependencies>
-      <restartRequiredAfterChange>true</restartRequiredAfterChange>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/package/scripts/kerberos_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/package/scripts/kerberos_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/package/scripts/kerberos_client.py
deleted file mode 100755
index e014349..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/package/scripts/kerberos_client.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from kerberos_common import *
-from resource_management.libraries.functions.security_commons import cached_kinit_executor
-
-class KerberosClient(KerberosScript):
-  def install(self, env):
-    install_packages = default('/configurations/kerberos-env/install_packages', "true")
-    if install_packages:
-      self.install_packages(env, ['krb5-server', 'krb5-libs', 'krb5-auth-dialog', 'krb5', 'krb5-kdc', 'krb5-admin-server'])
-    else:
-      print "Kerberos client packages are not being installed, manual 
installation is required."
-
-    self.configure(env)
-
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    if params.manage_krb5_conf:
-      self.write_krb5_conf()
-    #delete krb cache to prevent using old krb tickets on fresh kerberos setup
-    self.clear_tmp_cache()
-
-    #self.setup_jce()
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-  def security_status(self, env):
-    import status_params
-    if status_params.security_enabled:
-      if status_params.smoke_user and status_params.smoke_user_keytab:
-        try:
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.smoke_user,
-                                status_params.smoke_user_keytab,
-                                status_params.smoke_user_principal,
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        self.put_structured_out({"securityState": "UNKNOWN"})
-        self.put_structured_out({"securityStateErrorInfo": "Missing smoke user 
credentials"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-
-  def set_keytab(self, env):
-    self.write_keytab_file()
-
-  def remove_keytab(self, env):
-    self.delete_keytab_file()
-
-  #def download_install_jce(self, env):
-  #  self.setup_jce()
-
-
-if __name__ == "__main__":
-  KerberosClient().execute()
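
One caveat in install() above: default('/configurations/kerberos-env/install_packages', "true") can hand back a string, and any non-empty string, including "false", is truthy in Python, so the if test may not do what the configuration intends. A defensive coercion, as a sketch (the helper name is made up; the string-returning behavior of default() is an assumption based on how it is used above):

def to_bool(value, fallback=True):
    # Coerce a config value that may arrive as a bool or a string.
    if isinstance(value, bool):
        return value
    if isinstance(value, basestring):  # Python 2, matching the script above
        return value.strip().lower() in ('true', 'yes', '1')
    return fallback

print(to_bool("false"))  # False, whereas `if "false":` would take the True branch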

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/package/scripts/kerberos_common.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/package/scripts/kerberos_common.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/package/scripts/kerberos_common.py
deleted file mode 100755
index e606064..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/KERBEROS/package/scripts/kerberos_common.py
+++ /dev/null
@@ -1,473 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import base64
-import os
-import string
-import subprocess
-import sys
-import tempfile
-from tempfile import gettempdir
-
-from resource_management import *
-from utils import get_property_value
-from ambari_commons.os_utils import remove_file
-from ambari_agent import Constants
-
-class KerberosScript(Script):
-  KRB5_REALM_PROPERTIES = [
-    'kdc',
-    'admin_server',
-    'default_domain',
-    'master_kdc'
-  ]
-
-  KRB5_SECTION_NAMES = [
-    'libdefaults',
-    'logging',
-    'realms',
-    'domain_realm',
-    'capaths',
-    'ca_paths',
-    'appdefaults',
-    'plugins'
-  ]
-
-  @staticmethod
-  def create_random_password():
-    import random
-
-    chars = string.digits + string.ascii_letters
-    return ''.join(random.choice(chars) for x in range(13))
-
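A side note on create_random_password() above: the random module's default generator is not cryptographically strong. A sketch of a drop-in variant using random.SystemRandom (part of the Python 2 standard library), keeping the same alphabet and 13-character length:

import random
import string

def create_random_password(length=13):
    # Same alphabet as above, but keyed off the OS entropy pool
    # (os.urandom) instead of the deterministic Mersenne Twister.
    chars = string.digits + string.ascii_letters
    rng = random.SystemRandom()
    return ''.join(rng.choice(chars) for _ in range(length))
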
-  @staticmethod
-  def write_conf_section(output_file, section_name, section_data):
-    if section_name is not None:
-      output_file.write('[%s]\n' % section_name)
-
-      if section_data is not None:
-        for key, value in section_data.iteritems():
-          output_file.write(" %s = %s\n" % (key, value))
-
-
-  @staticmethod
-  def _write_conf_realm(output_file, realm_name, realm_data):
-    """ Writes out realm details
-
-    Example:
-
-     EXAMPLE.COM = {
-      kdc = kerberos.example.com
-      admin_server = kerberos.example.com
-     }
-
-    """
-    if realm_name is not None:
-      output_file.write(" %s = {\n" % realm_name)
-
-      if realm_data is not None:
-        for key, value in realm_data.iteritems():
-          if key in KerberosScript.KRB5_REALM_PROPERTIES:
-            output_file.write("  %s = %s\n" % (key, value))
-
-      output_file.write(" }\n")
-
-  @staticmethod
-  def write_conf_realms_section(output_file, section_name, realms_data):
-    if section_name is not None:
-      output_file.write('[%s]\n' % section_name)
-
-      if realms_data is not None:
-        for realm, realm_data in realms_data.iteritems():
-          KerberosScript._write_conf_realm(output_file, realm, realm_data)
-          output_file.write('\n')
-
-  @staticmethod
-  def write_krb5_conf():
-    import params
-
-    Directory(params.krb5_conf_dir,
-              owner='root',
-              recursive=True,
-              group='root',
-              mode=0755
-    )
-
-    if (params.krb5_conf_template is None) or not params.krb5_conf_template.strip():
-      content = Template('krb5_conf.j2')
-    else:
-      content = InlineTemplate(params.krb5_conf_template)
-
-    File(params.krb5_conf_path,
-         content=content,
-         owner='root',
-         group='root',
-         mode=0644
-    )
-
-  @staticmethod
-  def invoke_kadmin(query, admin_identity=None, default_realm=None):
-    """
-    Executes the kadmin or kadmin.local command (depending on whether admin_identity is set or not)
-    and returns the command result code and standard out data.
-
-    :param query: the kadmin query to execute
-    :param admin_identity: the identity for the administrative user (optional)
-    :param default_realm: the default realm to assume
-    :return: return_code, out
-    """
-    if (query is not None) and (len(query) > 0):
-      auth_principal = None
-      auth_keytab_file = None
-
-      if admin_identity is not None:
-        auth_principal = get_property_value(admin_identity, 'principal')
-
-      if auth_principal is None:
-        kadmin = 'kadmin.local'
-        credential = ''
-      else:
-        kadmin = 'kadmin -p "%s"' % auth_principal
-
-        auth_password = get_property_value(admin_identity, 'password')
-
-        if auth_password is None:
-          auth_keytab = get_property_value(admin_identity, 'keytab')
-
-          if auth_keytab is not None:
-            (fd, auth_keytab_file) = tempfile.mkstemp()
-            os.write(fd, base64.b64decode(auth_keytab))
-            os.close(fd)
-
-          credential = '-k -t %s' % auth_keytab_file
-        else:
-          credential = '-w "%s"' % auth_password
-
-      if (default_realm is not None) and (len(default_realm) > 0):
-        realm = '-r %s' % default_realm
-      else:
-        realm = ''
-
-      try:
-        command = '%s %s %s -q "%s"' % (kadmin, credential, realm, query.replace('"', '\\"'))
-        return shell.checked_call(command)
-      except:
-        raise
-      finally:
-        if auth_keytab_file is not None:
-          os.remove(auth_keytab_file)
-
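To make the branches in invoke_kadmin() above concrete, here is a standalone sketch of the command strings it assembles (values are placeholders; note that if an admin identity carries neither a password nor a keytab, the code above would emit a literal '-k -t None'):

def kadmin_command(query, principal=None, password=None, keytab_file=None, realm=None):
    # Mirrors the string assembly in invoke_kadmin() above.
    if principal is None:
        kadmin, credential = 'kadmin.local', ''
    else:
        kadmin = 'kadmin -p "%s"' % principal
        credential = ('-w "%s"' % password) if password is not None else ('-k -t %s' % keytab_file)
    realm_opt = ('-r %s' % realm) if realm else ''
    return '%s %s %s -q "%s"' % (kadmin, credential, realm_opt, query.replace('"', '\\"'))

print(kadmin_command('getprinc smokeuser@EXAMPLE.COM',
                     principal='admin/admin@EXAMPLE.COM',
                     password='secret', realm='EXAMPLE.COM'))
# -> kadmin -p "admin/admin@EXAMPLE.COM" -w "secret" -r EXAMPLE.COM -q "getprinc smokeuser@EXAMPLE.COM"
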
-  @staticmethod
-  def create_keytab_file(principal, path, auth_identity=None):
-    success = False
-
-    if (principal is not None) and (len(principal) > 0):
-      if (auth_identity is None) or (len(auth_identity) == 0):
-        norandkey = '-norandkey'
-      else:
-        norandkey = ''
-
-      if (path is not None) and (len(path) > 0):
-        keytab_file = '-k %s' % path
-      else:
-        keytab_file = ''
-
-      try:
-        result_code, output = KerberosScript.invoke_kadmin(
-          'ktadd %s %s %s' % (keytab_file, norandkey, principal),
-          auth_identity)
-
-        success = (result_code == 0)
-      except:
-        raise Fail("Failed to create keytab for principal: %s (in %s)" % 
(principal, path))
-
-    return success
-
-  @staticmethod
-  def create_keytab(principal, auth_identity=None):
-    keytab = None
-
-    (fd, temp_path) = tempfile.mkstemp()
-    os.remove(temp_path)
-
-    try:
-      if KerberosScript.create_keytab_file(principal, temp_path, auth_identity):
-        with open(temp_path, 'r') as f:
-          keytab = base64.b64encode(f.read())
-    finally:
-      if os.path.isfile(temp_path):
-        os.remove(temp_path)
-
-    return keytab
-
-  @staticmethod
-  def principal_exists(identity, auth_identity=None):
-    exists = False
-
-    if identity is not None:
-      principal = get_property_value(identity, 'principal')
-
-      if (principal is not None) and (len(principal) > 0):
-        try:
-          result_code, output = KerberosScript.invoke_kadmin('getprinc %s' % principal,
-                                                             auth_identity)
-          exists = (output is not None) and (("Principal: %s" % principal) in output)
-        except:
-          raise Fail("Failed to determine if principal exists: %s" % principal)
-
-    return exists
-
-  @staticmethod
-  def change_principal_password(identity, auth_identity=None):
-    success = False
-
-    if identity is not None:
-      principal = get_property_value(identity, 'principal')
-
-      if (principal is not None) and (len(principal) > 0):
-        password = get_property_value(identity, 'password')
-
-        if password is None:
-          credentials = '-randkey'
-        else:
-          credentials = '-pw "%s"' % password
-
-        try:
-          result_code, output = KerberosScript.invoke_kadmin(
-            'change_password %s %s' % (credentials, principal),
-            auth_identity)
-
-          success = (result_code == 0)
-        except:
-          raise Fail("Failed to create principal: %s" % principal)
-
-    return success
-
-  @staticmethod
-  def create_principal(identity, auth_identity=None):
-    success = False
-
-    if identity is not None:
-      principal = get_property_value(identity, 'principal')
-
-      if (principal is not None) and (len(principal) > 0):
-        password = get_property_value(identity, 'password')
-
-        if password is None:
-          credentials = '-randkey'
-        else:
-          credentials = '-pw "%s"' % password
-
-        try:
-          result_code, out = KerberosScript.invoke_kadmin(
-            'addprinc %s %s' % (credentials, principal),
-            auth_identity)
-
-          success = (result_code == 0)
-        except:
-          raise Fail("Failed to create principal: %s" % principal)
-
-    return success
-
-  @staticmethod
-  def clear_tmp_cache():
-    tmp_dir = Constants.AGENT_TMP_DIR
-    if tmp_dir is None:
-      tmp_dir = gettempdir()
-    curl_krb_cache_path = os.path.join(tmp_dir, "curl_krb_cache")
-    Directory(curl_krb_cache_path, action="delete")
-
-  @staticmethod
-  def create_principals(identities, auth_identity=None):
-    if identities is not None:
-      for identity in identities:
-        KerberosScript.create_principal(identity, auth_identity)
-
-  @staticmethod
-  def create_or_update_administrator_identity():
-    import params
-
-    if params.realm is not None:
-      admin_identity = params.get_property_value(params.realm, 'admin_identity')
-
-      if KerberosScript.principal_exists(admin_identity):
-        KerberosScript.change_principal_password(admin_identity)
-      else:
-        KerberosScript.create_principal(admin_identity)
-
-  @staticmethod
-  def test_kinit(identity, user=None):
-    principal = get_property_value(identity, 'principal')
-    kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-    kdestroy_path_local = functions.get_kdestroy_path(default('/configurations/kerberos-env/executable_search_paths', None))
-
-    if principal is not None:
-      keytab_file = get_property_value(identity, 'keytab_file')
-      keytab = get_property_value(identity, 'keytab')
-      password = get_property_value(identity, 'password')
-
-      # If a test keytab file is available, simply use it
-      if (keytab_file is not None) and (os.path.isfile(keytab_file)):
-        command = '%s -k -t %s %s' % (kinit_path_local, keytab_file, principal)
-        Execute(command,
-          user = user,
-        )
-        return shell.checked_call(kdestroy_path_local)
-
-      # If base64-encoded test keytab data is available, then decode it, write it to a temporary file,
-      # use it, and then remove the temporary file
-      elif keytab is not None:
-        (fd, test_keytab_file) = tempfile.mkstemp()
-        os.write(fd, base64.b64decode(keytab))
-        os.close(fd)
-
-        try:
-          command = '%s -k -t %s %s' % (kinit_path_local, test_keytab_file, principal)
-          Execute(command,
-            user = user,
-          )
-          return shell.checked_call(kdestroy_path_local)
-        except:
-          raise
-        finally:
-          if test_keytab_file is not None:
-            os.remove(test_keytab_file)
-
-      # If no keytab data is available and a password was supplied, simply use it.
-      elif password is not None:
-        process = subprocess.Popen([kinit_path_local, principal], stdin=subprocess.PIPE)
-        stdout, stderr = process.communicate(password)
-        if process.returncode:
-          err_msg = Logger.filter_text("Execution of kinit returned %d. %s" % (process.returncode, stderr))
-          raise Fail(err_msg)
-        else:
-          return shell.checked_call(kdestroy_path_local)
-      else:
-        return 0, ''
-    else:
-      return 0, ''
-
-
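test_kinit() above tries three credential sources in order: an on-disk keytab file, base64-encoded keytab data written to a temporary file, then a password fed to kinit on stdin. A sketch of the keytab-file invocation it builds (paths and principal are placeholders):

# Illustrative reconstruction of the keytab branch of test_kinit() above.
kinit_path = '/usr/bin/kinit'
keytab_file = '/etc/security/keytabs/smokeuser.headless.keytab'
principal = 'ambari-qa@EXAMPLE.COM'
command = '%s -k -t %s %s' % (kinit_path, keytab_file, principal)
print(command)  # /usr/bin/kinit -k -t /etc/security/keytabs/smokeuser.headless.keytab ambari-qa@EXAMPLE.COM
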
-  def write_keytab_file(self):
-    import params
-    import stat
-
-    if params.kerberos_command_params is not None:
-      for item in params.kerberos_command_params:
-        keytab_content_base64 = get_property_value(item, 'keytab_content_base64')
-        if (keytab_content_base64 is not None) and (len(keytab_content_base64) > 0):
-          keytab_file_path = get_property_value(item, 'keytab_file_path')
-          if (keytab_file_path is not None) and (len(keytab_file_path) > 0):
-            head, tail = os.path.split(keytab_file_path)
-            if head:
-              Directory(head, recursive=True, mode=0755, owner="root", group="root")
-
-            owner = get_property_value(item, 'keytab_file_owner_name')
-            owner_access = get_property_value(item, 'keytab_file_owner_access')
-            group = get_property_value(item, 'keytab_file_group_name')
-            group_access = get_property_value(item, 'keytab_file_group_access')
-            mode = 0
-
-            if owner_access == 'rw':
-              mode |= stat.S_IREAD | stat.S_IWRITE
-            else:
-              mode |= stat.S_IREAD
-
-            if group_access == 'rw':
-              mode |= stat.S_IRGRP | stat.S_IWGRP
-            elif group_access == 'r':
-              mode |= stat.S_IRGRP
-
-            keytab_content = base64.b64decode(keytab_content_base64)
-
-            # to hide content in command output
-            def make_lambda(data):
-              return lambda: data
-
-            File(keytab_file_path,
-                 content=make_lambda(keytab_content),
-                 mode=mode,
-                 owner=owner,
-                 group=group)
-
-            principal = get_property_value(item, 'principal')
-            if principal is not None:
-              curr_content = Script.structuredOut
-
-              if "keytabs" not in curr_content:
-                curr_content['keytabs'] = {}
-
-              curr_content['keytabs'][principal.replace("_HOST", params.hostname)] = keytab_file_path
-
-              self.put_structured_out(curr_content)
-
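The permission arithmetic in write_keytab_file() above composes the file mode from stat bits. As a quick worked example with hypothetical inputs, owner_access='rw' with group_access='r' yields mode 0640:

import stat

# owner 'rw' -> read + write for owner; group 'r' -> read for group.
mode = (stat.S_IREAD | stat.S_IWRITE) | stat.S_IRGRP
print(oct(mode))  # 0640 on Python 2 (0o640 on Python 3)
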
-  def delete_keytab_file(self):
-    import params
-
-    if params.kerberos_command_params is not None:
-      for item in params.kerberos_command_params:
-        keytab_file_path = get_property_value(item, 'keytab_file_path')
-        if (keytab_file_path is not None) and (len(keytab_file_path) > 0):
-
-          # Delete the keytab file
-          File(keytab_file_path, action="delete")
-
-          principal = get_property_value(item, 'principal')
-          if principal is not None:
-            curr_content = Script.structuredOut
-
-            if "keytabs" not in curr_content:
-              curr_content['keytabs'] = {}
-
-            curr_content['keytabs'][principal.replace("_HOST", params.hostname)] = '_REMOVED_'
-
-            self.put_structured_out(curr_content)
-
-  def setup_jce(self):
-    import params
-
-    if not params.jdk_name:
-      return
-    jce_curl_target = None
-    if params.jce_policy_zip is not None:
-      jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
-      Directory(params.artifact_dir,
-                recursive = True,
-                )
-      File(jce_curl_target,
-           content = DownloadSource(format("{jce_location}/{jce_policy_zip}")),
-           )
-    elif params.security_enabled:
-      # Something weird is happening
-      raise Fail("Security is enabled, but JCE policy zip is not specified.")
-
-    # The extraction will occur only after the security flag is set
-    if params.security_enabled:
-      security_dir = format("{java_home}/jre/lib/security")
-
-      File([format("{security_dir}/US_export_policy.jar"), format("{security_dir}/local_policy.jar")],
-           action = "delete",
-           )
-
-      extract_cmd = ("unzip", "-o", "-j", "-q", jce_curl_target, "-d", security_dir)
-      Execute(extract_cmd,
-              only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
-              path = ['/bin/','/usr/bin'],
-              sudo = True
-      )
