http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/metainfo.xml~
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/metainfo.xml~ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/metainfo.xml~
deleted file mode 100644
index b1ad21c..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/metainfo.xml~
+++ /dev/null
@@ -1,190 +0,0 @@
-<?xml version="1.0"?>
-<!--Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>SPARK</name>
-      <displayName>Spark</displayName>
-      <comment>Apache Spark is a fast and general engine for large-scale data processing</comment>
-      <version>1.6.1.4.2</version>
-      <components>
-        <component>
-          <name>SPARK_JOBHISTORYSERVER</name>
-          <displayName>Spark History Server</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <versionAdvertised>true</versionAdvertised>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-               <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
-               <scope>host</scope>
-               <auto-deploy>
-                 <enabled>true</enabled>
-               </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>YARN/YARN_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-             </auto-deploy>
-           </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/job_history_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-        <component>
-          <name>SPARK_THRIFTSERVER</name>
-          <displayName>Spark Thrift Server</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <versionAdvertised>true</versionAdvertised>
-          <dependencies>
-            <dependency>
-              <name>HIVE/HIVE_METASTORE</name>
-              <scope>cluster</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>HIVE/HIVE_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/thrift_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-        <component>
-          <name>SPARK_CLIENT</name>
-          <displayName>Spark Client</displayName>
-          <category>CLIENT</category>
-          <cardinality>1+</cardinality>
-          <versionAdvertised>true</versionAdvertised>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-               <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
-               <scope>host</scope>
-               <auto-deploy>
-                 <enabled>true</enabled>
-               </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>YARN/YARN_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-             </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/spark_client.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>env</type>
-              <fileName>spark-env.sh</fileName>
-              <dictionaryName>spark-env</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>properties</type>
-              <fileName>spark-defaults.conf</fileName>
-              <dictionaryName>spark-defaults</dictionaryName>
-            </configFile>
-            <configFile>
-              <type>env</type>
-              <fileName>log4j.properties</fileName>
-              <dictionaryName>spark-log4j</dictionaryName>
-            </configFile>                          
-          </configFiles>
-        </component>
-      </components>
-
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>redhat7,redhat6,suse11</osFamily>
-          <packages>
-            <package>
-              <name>spark-core_4_2_0*</name>
-            </package>
-            <package>
-              <name>spark_4_2_0*</name>
-            </package>
-          </packages>
-        </osSpecific>
-        <osSpecific>
-          <osFamily>ubuntu12</osFamily>
-          <packages>
-            <package>
-              <name>spark-core_4_2_0*</name>
-            </package>
-            <package>
-               <name>spark_4_2_0*</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <configuration-dependencies>
-        <config-type>spark-env</config-type>
-        <config-type>spark-defaults</config-type>
-        <config-type>spark-log4j</config-type>
-      </configuration-dependencies>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-
-      <requiredServices>
-        <service>HDFS</service>
-        <service>YARN</service>
-        <service>HIVE</service>
-      </requiredServices>
-
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py
deleted file mode 100644
index 026086c..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/job_history_server.py
+++ /dev/null
@@ -1,167 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-import os
-from resource_management import *
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import iop_select
-#from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
-from resource_management.core.exceptions import ComponentIsNotRunning
-from resource_management.core.logger import Logger
-from resource_management.core import shell
-from spark import *
-
-
-class JobHistoryServer(Script):
-
-  def get_stack_to_component(self):
-     return {"BigInsights": "spark-historyserver"}
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "spark", params.version)
-      iop_select.select("spark-historyserver", params.version)
-      #Execute(format("iop-select set spark-historyserver {version}"))
-
-  def install(self, env):
-    self.install_packages(env)
-    import params
-
-    env.set_params(params)
-    self.configure(env)
-
-  def stop(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    self.configure(env)
-    daemon_cmd = format('{spark_history_server_stop}')
-    Execute(daemon_cmd,
-            user=params.spark_user,
-            environment={'JAVA_HOME': params.java_home}
-    )
-    if os.path.isfile(params.spark_history_server_pid_file):
-      os.remove(params.spark_history_server_pid_file)
-
-
-  def start(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    self.configure(env)
-    self.create_historyServer_directory()
-    self.copy_spark_yarn_jar()
-
-    if params.security_enabled:
-      spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
-      Execute(spark_kinit_cmd, user=params.spark_user)
-
-    # FIXME! TODO! remove this after soft link bug is fixed:
-    #if not os.path.islink('/usr/iop/current/spark'):
-    #  iop_version = get_iop_version()
-    #  cmd = 'ln -s /usr/iop/' + iop_version + '/spark /usr/iop/current/spark'
-    #  Execute(cmd)
-
-    daemon_cmd = format('{spark_history_server_start}')
-    no_op_test = format(
-      'ls {spark_history_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_history_server_pid_file}` >/dev/null 2>&1')
-    Execute(daemon_cmd,
-            user=params.spark_user,
-            environment={'JAVA_HOME': params.java_home},
-            not_if=no_op_test
-    )
-
-  def status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    pid_file = format("{spark_history_server_pid_file}")
-    # Recursively check all existing pid files
-    check_process_status(pid_file)
-
-  def create_historyServer_directory(self):
-    import params
-
-    params.HdfsResource(params.spark_hdfs_user_dir,
-                         type="directory",
-                         action="create_on_execute",
-                         owner=params.spark_user,
-                         group=params.user_group,
-                         mode=params.spark_hdfs_user_mode)
-
-    params.HdfsResource(params.spark_eventlog_dir_default,
-                         type="directory",
-                         action="create_on_execute",
-                         owner=params.spark_user,
-                         group=params.user_group,
-                         mode=params.spark_eventlog_dir_mode)
-    
-    params.HdfsResource(None, action="execute")
-
-  def copy_spark_yarn_jar(self):
-    import params
-
-    jar_src_file = params.spark_jar_src_dir + "/" + params.spark_jar_src_file
-    jar_dst_file = params.spark_jar_hdfs_dir + "/" + params.spark_jar_src_file
-    jar_dst_path = params.spark_jar_hdfs_dir
-
-    # Remove to enable refreshing jars during restart
-    hdfs_remove_cmd = "dfs -rm -R -skipTrash %s" % jar_dst_path
-
-    try:
-      ExecuteHadoop(hdfs_remove_cmd,
-                    user=params.hdfs_user,
-                    logoutput=True,
-                    conf_dir=params.hadoop_conf_dir,
-                    bin_dir=params.hadoop_bin_dir)
-    except Fail:
-      pass
- 
-    params.HdfsResource(jar_dst_path,
-                        type="directory",
-                        action="create_on_execute",
-                        owner=params.spark_user,
-                        group=params.user_group,
-                        mode=params.spark_jar_hdfs_dir_mode)
-    
-    params.HdfsResource(None, action="execute")
-    
-    params.HdfsResource(InlineTemplate(jar_dst_file).get_content(),
-                        type="file",
-                        action="create_on_execute",
-                        source=jar_src_file,
-                        owner=params.spark_user,
-                        group=params.user_group,
-                        mode=params.spark_jar_file_mode)
-
-    params.HdfsResource(None, action="execute")
-
-  def configure(self, env):
-    import params
-
-    env.set_params(params)
-    spark(env)
-
-if __name__ == "__main__":
-  JobHistoryServer().execute()
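For reference, start() above only launches the daemon when the not_if guard fails, i.e. when `ls pidfile && ps -p $(cat pidfile)` does not find a live process. A minimal stdlib sketch of that liveness check (the pid-file path shown here is illustrative; the real one comes from status_params):

    import os

    def history_server_is_running(pid_file="/var/run/spark/spark-history-server.pid"):
        # Mirrors the shell guard: the pid file must exist and its pid must be a live process.
        if not os.path.isfile(pid_file):
            return False
        try:
            pid = int(open(pid_file).read().strip())
        except ValueError:
            return False
        try:
            os.kill(pid, 0)  # signal 0 checks existence without sending a signal
            return True
        except OSError:
            return False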

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/params.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/params.py
deleted file mode 100644
index 13c3b00..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/params.py
+++ /dev/null
@@ -1,215 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management.libraries.functions.default import default
-from resource_management import *
-from resource_management.libraries.functions import conf_select
-from spark import *
-import status_params
-
-
-# a map of the Ambari role to the component name
-# for use with /usr/iop/current/<component>
-SERVER_ROLE_DIRECTORY_MAP = {
-  'SPARK_JOBHISTORYSERVER' : 'spark-historyserver',
-  'SPARK_CLIENT' : 'spark-client',
-  'SPARK_THRIFTSERVER' : 'spark-thriftserver'
-}
-upgrade_direction = default("/commandParams/upgrade_direction", None)
-
-component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "SPARK_CLIENT")
-
-config = Script.get_config()
-tmp_dir = Script.get_tmp_dir()
-iop_full_version = get_iop_version()
-
-# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
-version = default("/commandParams/version", None)
-stack_name = default("/hostLevelParams/stack_name", None)
-
-hadoop_home = "/usr/iop/current/hadoop-client"
-spark_conf = format("/usr/iop/current/{component_directory}/conf")
-spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
-spark_pid_dir = status_params.spark_pid_dir
-spark_role_root = "spark-client"
-
-command_role = default("/role", "")
-
-if command_role == "SPARK_CLIENT":
-  spark_role_root = "spark-client"
-elif command_role == "SPARK_JOBHISTORYSERVER":
-  spark_role_root = "spark-historyserver"
-elif command_role == "SPARK_THRIFTSERVER":
-  spark_role_root = "spark-thriftserver"
-
-spark_home = format("/usr/iop/current/{spark_role_root}")
-if not os.path.exists(spark_home):
-  os.symlink('/usr/iop/current/spark', spark_home)
-
-java_home = config['hostLevelParams']['java_home']
-
-spark_user = status_params.spark_user
-hive_user = status_params.hive_user
-spark_group = status_params.spark_group
-user_group = status_params.user_group
-
-spark_hdfs_user_dir = format("/user/{spark_user}")
-spark_hdfs_user_mode = 0755
-spark_eventlog_dir_mode = 01777
-spark_jar_hdfs_dir = "/iop/apps/" + str(iop_full_version) + "/spark/jars"
-spark_jar_hdfs_dir_mode = 0755
-spark_jar_file_mode = 0444
-spark_jar_src_dir = "/usr/iop/current/spark-historyserver/lib"
-spark_jar_src_file = "spark-assembly.jar"
-
-spark_history_server_pid_file = status_params.spark_history_server_pid_file
-spark_thrift_server_pid_file = status_params.spark_thrift_server_pid_file
-
-spark_history_server_start = format("{spark_home}/sbin/start-history-server.sh")
-spark_history_server_stop = format("{spark_home}/sbin/stop-history-server.sh")
-
-spark_thrift_server_start = format("{spark_home}/sbin/start-thriftserver.sh")
-spark_thrift_server_stop = format("{spark_home}/sbin/stop-thriftserver.sh")
-
-spark_submit_cmd = format("{spark_home}/bin/spark-submit")
-spark_smoke_example = "org.apache.spark.examples.SparkPi"
-spark_service_check_cmd = format(
-  "{spark_submit_cmd} --class {spark_smoke_example}  --master yarn-cluster  --num-executors 1 --driver-memory 256m  --executor-memory 256m   --executor-cores 1  {spark_home}/lib/spark-examples*.jar 1")
-
-spark_jobhistoryserver_hosts = default("/clusterHostInfo/spark_jobhistoryserver_hosts", [])
-spark_thriftserver_hosts = default("/clusterHostInfo/spark_thriftserver_hosts", [])
-namenode_hosts = default("/clusterHostInfo/namenode_host", [])
-has_namenode = not len(namenode_hosts) == 0
-
-if len(spark_jobhistoryserver_hosts) > 0:
-  spark_history_server_host = spark_jobhistoryserver_hosts[0]
-else:
-  spark_history_server_host = "localhost"
-
-if len(spark_thriftserver_hosts) > 0:
-  spark_thrift_server_host = spark_thriftserver_hosts[0]
-else:
-  spark_thrift_server_host = "localhost"
-# spark-defaults params
-if has_namenode: 
-  namenode_host = str(namenode_hosts[0])
-else:
-  namenode_host = "localhost"
-
-hadoop_fs_defaultfs = config['configurations']['core-site']['fs.defaultFS']
-spark_eventlog_dir_default=hadoop_fs_defaultfs + config['configurations']['spark-defaults']['spark.eventLog.dir']
-spark_yarn_jar_default=hadoop_fs_defaultfs + '/iop/apps/' + str(iop_full_version) + '/spark/jars/spark-assembly.jar'
-
-spark_yarn_applicationMaster_waitTries = default(
-  "/configurations/spark-defaults/spark.yarn.applicationMaster.waitTries", '10')
-spark_yarn_submit_file_replication = default("/configurations/spark-defaults/spark.yarn.submit.file.replication", '3')
-spark_yarn_preserve_staging_files = default("/configurations/spark-defaults/spark.yarn.preserve.staging.files", "false")
-spark_yarn_scheduler_heartbeat_interval = default(
-  "/configurations/spark-defaults/spark.yarn.scheduler.heartbeat.interval-ms", "5000")
-spark_yarn_queue = default("/configurations/spark-defaults/spark.yarn.queue", "default")
-spark_yarn_containerLauncherMaxThreads = default(
-  "/configurations/spark-defaults/spark.yarn.containerLauncherMaxThreads", "25")
-spark_yarn_max_executor_failures = default("/configurations/spark-defaults/spark.yarn.max.executor.failures", "3")
-spark_yarn_executor_memoryOverhead = default("/configurations/spark-defaults/spark.yarn.executor.memoryOverhead", "384")
-spark_yarn_driver_memoryOverhead = default("/configurations/spark-defaults/spark.yarn.driver.memoryOverhead", "384")
-spark_history_ui_port = default("/configurations/spark-defaults/spark.history.ui.port", "18080")
-spark_thriftserver_port = default("/configurations/spark-env/spark_thriftserver_port", "10015")
-spark_eventlog_enabled = default("/configurations/spark-defaults/spark.eventLog.enabled", "true")
-spark_eventlog_dir = default("/configurations/spark-defaults/spark.eventLog.dir", spark_eventlog_dir_default)
-spark_yarn_jar = default("/configurations/spark-defaults/spark.yarn.jar", spark_yarn_jar_default)
-spark_thriftserver_ui_port = 4039
-
-# add the properties that cannot be configured thru UI
-spark_conf_properties_map = dict(config['configurations']['spark-defaults'])
-spark_conf_properties_map["spark.yarn.historyServer.address"] = spark_history_server_host + ":" + str(spark_history_ui_port)
-spark_conf_properties_map["spark.yarn.jar"] = spark_yarn_jar
-spark_conf_properties_map["spark.eventLog.dir"] = spark_eventlog_dir_default
-
-spark_env_sh = config['configurations']['spark-env']['content']
-spark_log4j = config['configurations']['spark-log4j']['content']
-#spark_metrics_properties = config['configurations']['spark-metrics-properties']['content']
-spark_javaopts_properties = config['configurations']['spark-javaopts-properties']['content']
-hive_server_host = default("/clusterHostInfo/hive_server_host", [])
-is_hive_installed = not len(hive_server_host) == 0
-
-spark_driver_extraJavaOptions = str(config['configurations']['spark-defaults']['spark.driver.extraJavaOptions'])
-if spark_driver_extraJavaOptions.find('-Diop.version') == -1:
-  spark_driver_extraJavaOptions = spark_driver_extraJavaOptions + ' -Diop.version=' + str(iop_full_version)
-
-spark_yarn_am_extraJavaOptions = str(config['configurations']['spark-defaults']['spark.yarn.am.extraJavaOptions'])
-if spark_yarn_am_extraJavaOptions.find('-Diop.version') == -1:
-  spark_yarn_am_extraJavaOptions = spark_yarn_am_extraJavaOptions + ' -Diop.version=' + str(iop_full_version)
-
-spark_javaopts_properties = str(spark_javaopts_properties)
-if spark_javaopts_properties.find('-Diop.version') == -1:
-  spark_javaopts_properties = spark_javaopts_properties+ ' -Diop.version=' + str(iop_full_version)
-
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-kinit_path_local = functions.get_kinit_path()
-spark_kerberos_keytab =  config['configurations']['spark-defaults']['spark.history.kerberos.keytab']
-spark_kerberos_principal =  config['configurations']['spark-defaults']['spark.history.kerberos.principal']
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
-if security_enabled:
-  spark_principal = spark_kerberos_principal.replace('_HOST',spark_history_server_host.lower())
-# for create_hdfs_directory
-
-# To create hdfs directory
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
-
-hostname = config["hostname"]
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path()
-
-hdfs_site = config['configurations']['hdfs-site']
-default_fs = config['configurations']['core-site']['fs.defaultFS']
-
-# Hiveserver 2 properties
-hive_server2_authentication = config['configurations']['hive-site']['hive.server2.authentication']
-hive_transport_mode = config['configurations']['hive-site']['hive.server2.transport.mode']
-hive_server_principal = config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
-hive_http_endpoint = config['configurations']['hive-site']['hive.server2.thrift.http.path']
-hive_ssl = config['configurations']['hive-site']['hive.server2.use.SSL']
-if hive_ssl:
-  hive_ssl_keystore_path = str(config['configurations']['hive-site']['hive.server2.keystore.path'])
-  hive_ssl_keystore_password = str(config['configurations']['hive-site']['hive.server2.keystore.password'])
-else:
-  hive_ssl_keystore_path = None
-  hive_ssl_keystore_password = None
-
-import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir,
-  principal_name = hdfs_principal_name,
-  hdfs_site = hdfs_site,
-  default_fs = default_fs
-)
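For reference, the functools.partial call that closes params.py binds the cluster-wide HDFS arguments once so that call sites (as in job_history_server.py) only pass a path and an action. A minimal sketch of the same binding pattern with a stand-in function; the names and values below are illustrative, not the stack's:

    import functools

    def hdfs_resource(path, action, user=None, keytab=None, hadoop_conf_dir=None):
        # Stand-in for resource_management's HdfsResource, used only to show the binding.
        print("HdfsResource(%s, action=%s) as user=%s" % (path, action, user))

    # Bind the common arguments once, as params.py does.
    HdfsResource = functools.partial(
        hdfs_resource,
        user="hdfs",
        keytab="/etc/security/keytabs/hdfs.headless.keytab",
        hadoop_conf_dir="/etc/hadoop/conf",
    )

    # Call sites then look like the ones in job_history_server.py.
    HdfsResource("/user/spark", action="create_on_execute")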

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/service_check.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/service_check.py
deleted file mode 100644
index 2a9d6a5..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/service_check.py
+++ /dev/null
@@ -1,132 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from resource_management import *
-import subprocess
-import time
-
-class SparkServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    import spark_check
-
-    env.set_params(params)
-
-    # smoke_cmd = params.spark_service_check_cmd
-    # code, output = shell.call(smoke_cmd, timeout=100)
-    # if code == 0:
-    #   Logger.info('Spark-on-Yarn Job submitted successfully')
-    # else:
-    #   Logger.info('Spark-on-Yarn Job cannot be submitted')
-    #   raise ComponentIsNotRunning()
-
-    command = "curl"
-    httpGssnegotiate = "--negotiate"
-    userpswd = "-u:"
-    insecure = "-k"
-    silent = "-s"
-    out = "-o /dev/null"
-    head = "-w'%{http_code}'"
-    url = 'http://' + params.spark_history_server_host + ':' + str(params.spark_history_ui_port)
-
-    command_with_flags = [command, silent, out, head, httpGssnegotiate, userpswd, insecure, url]
-
-    is_running = False
-    for i in range(1,11):
-      proc = subprocess.Popen(command_with_flags, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-      Logger.info("Try %d, command: %s" % (i, " ".join(command_with_flags)))
-      (stdout, stderr) = proc.communicate()
-      response = stdout
-      if '200' in response:
-        is_running = True
-        Logger.info('Spark Job History Server up and running')
-        break
-      Logger.info("Response: %s" % str(response))
-      time.sleep(5)
-
-    if is_running == False :
-      Logger.info('Spark Job History Server not running.')
-      raise ComponentIsNotRunning()
-
-
-    Logger.info('Checking for Spark Thriftserver now')
-    if params.hive_server2_authentication == "KERBEROS" or params.hive_server2_authentication == "NONE":
-
-      address_list = params.spark_thriftserver_hosts
-
-      if not address_list:
-        raise Fail('Can not find any Spark Thriftserver host. Please check configuration.')
-
-      port = int(format("{spark_thriftserver_port}"))
-      Logger.info("Test connectivity to hive server")
-      if params.security_enabled:
-        kinitcmd=format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal}; ")
-      else:
-        kinitcmd=None
-
-      SOCKET_WAIT_SECONDS = 290
-
-      start_time = time.time()
-      end_time = start_time + SOCKET_WAIT_SECONDS
-
-      Logger.info('Waiting for the Spark Thriftserver to start...')
-
-      workable_server_available = False
-      i = 0
-      while time.time() < end_time and not workable_server_available:
-        address = address_list[i]
-        try:
-          spark_check.check_thrift_port_sasl(address, port, params.hive_server2_authentication,
-                                             params.hive_server_principal, kinitcmd=kinitcmd, smokeuser=params.smokeuser,
-                                             transport_mode=params.hive_transport_mode, http_endpoint=params.hive_http_endpoint,
-                                             ssl=params.hive_ssl, ssl_keystore=params.hive_ssl_keystore_path,
-                                             ssl_password=params.hive_ssl_keystore_password)
-          Logger.info("Successfully connected to %s on port %s" % (address, port))
-          workable_server_available = True
-        except Exception, e:
-          Logger.info(str(e))
-          Logger.info("Connection to %s on port %s failed" % (address, port))
-          time.sleep(5)
-
-        i += 1
-        if i == len(address_list):
-          i = 0
-
-      elapsed_time = time.time() - start_time
-
-      if not workable_server_available:
-        raise Fail("Connection to Spark Thriftserver %s on port %s failed after %d seconds" %
-                   (params.hostname, params.spark_thriftserver_port, elapsed_time))
-
-      Logger.info("Successfully connected to Spark Thriftserver at %s on port %s after %d seconds" % \
-            (params.hostname, params.spark_thriftserver_port, elapsed_time))
-
-
-
-    #command_with_flags = [command, silent, out, head, httpGssnegotiate, userpswd, insecure, url]
-    # proc = subprocess.Popen(command_with_flags, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    # (stdout, stderr) = proc.communicate()
-    # response = stdout
-    # if '200' in response:
-    #   Logger.info('Spark Job History Server up and running')
-    # else:
-    #   Logger.info('Spark Job History Server not running.')
-    #   raise ComponentIsNotRunning()
-
-if __name__ == "__main__":
-  SparkServiceCheck().execute()
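For reference, the service check above polls the History Server UI with curl up to ten times, five seconds apart, and passes as soon as the response contains an HTTP 200. A minimal sketch of the same retry loop, with an assumed URL (the real host and port come from params.py):

    import subprocess
    import time

    def wait_for_http_200(url, attempts=10, delay=5):
        # Same curl flags as the check: silent, discard body, print only the status code.
        cmd = ["curl", "-s", "-o", "/dev/null", "-w", "%{http_code}", "-k", "--negotiate", "-u", ":", url]
        for _ in range(attempts):
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdout, _err = proc.communicate()
            if b"200" in stdout:
                return True
            time.sleep(delay)
        return False

    # wait_for_http_200("http://history-server.example.com:18080")  # illustrative host/port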

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark.py
deleted file mode 100644
index 3835c53..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark.py
+++ /dev/null
@@ -1,353 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-import fileinput
-import shutil
-import os
-from resource_management import *
-from resource_management.core.exceptions import ComponentIsNotRunning
-from resource_management.core.logger import Logger
-from resource_management.core import shell
-
-
-def spark(env):
-  import params
-
-  env.set_params(params)
-
-  Directory(params.spark_conf,
-            owner=params.spark_user,
-            recursive=True,
-            group=params.user_group
-  )
-
-  Directory([params.spark_pid_dir, params.spark_log_dir],
-            owner=params.spark_user,
-            group=params.user_group,
-            mode=0775,
-            recursive=True
-  )
-  if type == 'server':
-    if action == 'start' or action == 'config':
-      params.HdfsResource(params.spark_hdfs_user_dir,
-                         type="directory",
-                         action="create_on_execute",
-                         owner=params.spark_user,
-                         mode=0775
-      )
-      params.HdfsResource(None, action="execute")
-
-  #file_path = params.spark_conf + '/spark-defaults.conf'
-  #create_file(file_path)
-
-  #write_properties_to_file(file_path, params.config['configurations']['spark-defaults'])
-  configFile("spark-defaults.conf", template_name="spark-defaults.conf.j2")  
-
-  # create spark-env.sh in conf dir
-  File(os.path.join(params.spark_conf, 'spark-env.sh'),
-       owner=params.spark_user,
-       group=params.user_group,
-       content=InlineTemplate(params.spark_env_sh)
-  )
-
-  # create log4j.properties in conf dir
-  File(os.path.join(params.spark_conf, 'log4j.properties'),
-       owner=params.spark_user,
-       group=params.user_group,
-       content=InlineTemplate(params.spark_log4j)
-  )
-
-  #create metrics.properties in conf dir
-#  File(os.path.join(params.spark_conf, 'metrics.properties'),
-#       owner=params.spark_user,
-#       group=params.spark_group,
-#       content=InlineTemplate(params.spark_metrics_properties)
-#  )
-
-  # create java-opts in etc/spark/conf dir for iop.version
-  File(os.path.join(params.spark_conf, 'java-opts'),
-       owner=params.spark_user,
-       group=params.user_group,
-       content=params.spark_javaopts_properties,
-       mode=0644
-  )
-
-  if params.is_hive_installed:
-    hive_config = get_hive_config()
-    XmlConfig("hive-site.xml",
-              conf_dir=params.spark_conf,
-              configurations=hive_config,
-              owner=params.spark_user,
-              group=params.user_group,
-              mode=0644)
-def get_hive_config():
-  import params
-  # MUST CONVERT BOOLEANS TO LOWERCASE STRINGS
-  hive_conf_dict = dict()
-  hive_conf_dict['hive.metastore.uris'] = params.config['configurations']['hive-site']['hive.metastore.uris']
-  hive_conf_dict['ambari.hive.db.schema.name'] = params.config['configurations']['hive-site']['ambari.hive.db.schema.name']
-  hive_conf_dict['datanucleus.cache.level2.type'] = params.config['configurations']['hive-site']['datanucleus.cache.level2.type']
-  #hive_conf_dict['fs.file.impl.disable.cache'] = str(params.config['configurations']['hive-site']['fs.file.impl.disable.cache']).lower()
-  #hive_conf_dict['fs.hdfs.impl.disable.cache'] = str(params.config['configurations']['hive-site']['fs.hdfs.impl.disable.cache']).lower()
-  hive_conf_dict['hive.auto.convert.join'] = str(params.config['configurations']['hive-site']['hive.auto.convert.join']).lower()
-  hive_conf_dict['hive.auto.convert.join.noconditionaltask'] = str(params.config['configurations']['hive-site']['hive.auto.convert.join.noconditionaltask']).lower()
-  hive_conf_dict['hive.auto.convert.join.noconditionaltask.size'] = params.config['configurations']['hive-site']['hive.auto.convert.join.noconditionaltask.size']
-  hive_conf_dict['hive.auto.convert.sortmerge.join'] = str(params.config['configurations']['hive-site']['hive.auto.convert.sortmerge.join']).lower()
-  #hive_conf_dict['hive.auto.convert.sortmerge.join.noconditionaltask'] = str(params.config['configurations']['hive-site']['hive.auto.convert.sortmerge.join.noconditionaltask']).lower()
-  hive_conf_dict['hive.auto.convert.sortmerge.join.to.mapjoin'] = str(params.config['configurations']['hive-site']['hive.auto.convert.sortmerge.join.to.mapjoin']).lower()
-  hive_conf_dict['hive.cbo.enable'] = str(params.config['configurations']['hive-site']['hive.cbo.enable']).lower()
-  hive_conf_dict['hive.cli.print.header'] = str(params.config['configurations']['hive-site']['hive.cli.print.header']).lower()
-  hive_conf_dict['hive.cluster.delegation.token.store.class'] = params.config['configurations']['hive-site']['hive.cluster.delegation.token.store.class']
-  hive_conf_dict['hive.cluster.delegation.token.store.zookeeper.connectString'] = params.config['configurations']['hive-site']['hive.cluster.delegation.token.store.zookeeper.connectString']
-  hive_conf_dict['hive.cluster.delegation.token.store.zookeeper.znode'] = params.config['configurations']['hive-site']['hive.cluster.delegation.token.store.zookeeper.znode']
-  hive_conf_dict['hive.compactor.abortedtxn.threshold'] = params.config['configurations']['hive-site']['hive.compactor.abortedtxn.threshold']
-  hive_conf_dict['hive.compactor.check.interval'] = params.config['configurations']['hive-site']['hive.compactor.check.interval']
-  hive_conf_dict['hive.compactor.delta.num.threshold'] = params.config['configurations']['hive-site']['hive.compactor.delta.num.threshold']
-  hive_conf_dict['hive.compactor.delta.pct.threshold'] = params.config['configurations']['hive-site']['hive.compactor.delta.pct.threshold']
-  hive_conf_dict['hive.compactor.initiator.on'] = str(params.config['configurations']['hive-site']['hive.compactor.initiator.on']).lower()
-  hive_conf_dict['hive.compactor.worker.threads'] = params.config['configurations']['hive-site']['hive.compactor.worker.threads']
-  hive_conf_dict['hive.compactor.worker.timeout'] = params.config['configurations']['hive-site']['hive.compactor.worker.timeout']
-  hive_conf_dict['hive.compute.query.using.stats'] = str(params.config['configurations']['hive-site']['hive.compute.query.using.stats']).lower()
-  hive_conf_dict['hive.conf.restricted.list'] = params.config['configurations']['hive-site']['hive.conf.restricted.list']
-  hive_conf_dict['hive.enforce.bucketing'] = str(params.config['configurations']['hive-site']['hive.enforce.bucketing']).lower()
-  hive_conf_dict['hive.enforce.sorting'] = str(params.config['configurations']['hive-site']['hive.enforce.sorting']).lower()
-  hive_conf_dict['hive.enforce.sortmergebucketmapjoin'] = str(params.config['configurations']['hive-site']['hive.enforce.sortmergebucketmapjoin']).lower()
-  hive_conf_dict['hive.exec.compress.intermediate'] = str(params.config['configurations']['hive-site']['hive.exec.compress.intermediate']).lower()
-  hive_conf_dict['hive.exec.compress.output'] = str(params.config['configurations']['hive-site']['hive.exec.compress.output']).lower()
-  hive_conf_dict['hive.exec.dynamic.partition'] = str(params.config['configurations']['hive-site']['hive.exec.dynamic.partition']).lower()
-  hive_conf_dict['hive.exec.dynamic.partition.mode'] = params.config['configurations']['hive-site']['hive.exec.dynamic.partition.mode']
-  hive_conf_dict['hive.exec.max.created.files'] = params.config['configurations']['hive-site']['hive.exec.max.created.files']
-  hive_conf_dict['hive.exec.max.dynamic.partitions'] = params.config['configurations']['hive-site']['hive.exec.max.dynamic.partitions']
-  hive_conf_dict['hive.exec.max.dynamic.partitions.pernode'] = params.config['configurations']['hive-site']['hive.exec.max.dynamic.partitions.pernode']
-  hive_conf_dict['hive.exec.orc.compression.strategy'] = params.config['configurations']['hive-site']['hive.exec.orc.compression.strategy']
-  hive_conf_dict['hive.exec.orc.default.compress'] = params.config['configurations']['hive-site']['hive.exec.orc.default.compress']
-  hive_conf_dict['hive.exec.orc.default.stripe.size'] = params.config['configurations']['hive-site']['hive.exec.orc.default.stripe.size']
-  hive_conf_dict['hive.exec.parallel'] = str(params.config['configurations']['hive-site']['hive.exec.parallel']).lower()
-  hive_conf_dict['hive.exec.parallel.thread.number'] = params.config['configurations']['hive-site']['hive.exec.parallel.thread.number']
-  hive_conf_dict['hive.exec.reducers.bytes.per.reducer'] = params.config['configurations']['hive-site']['hive.exec.reducers.bytes.per.reducer']
-  hive_conf_dict['hive.exec.reducers.max'] = params.config['configurations']['hive-site']['hive.exec.reducers.max']
-  hive_conf_dict['hive.exec.scratchdir'] = params.config['configurations']['hive-site']['hive.exec.scratchdir']
-  hive_conf_dict['hive.exec.submit.local.task.via.child'] = str(params.config['configurations']['hive-site']['hive.exec.submit.local.task.via.child']).lower()
-  hive_conf_dict['hive.exec.submitviachild'] = str(params.config['configurations']['hive-site']['hive.exec.submitviachild']).lower()
-  hive_conf_dict['hive.execution.engine'] = params.config['configurations']['hive-site']['hive.execution.engine']
-  hive_conf_dict['hive.fetch.task.aggr'] = str(params.config['configurations']['hive-site']['hive.fetch.task.aggr']).lower()
-  hive_conf_dict['hive.fetch.task.conversion'] = params.config['configurations']['hive-site']['hive.fetch.task.conversion']
-  hive_conf_dict['hive.fetch.task.conversion.threshold'] = params.config['configurations']['hive-site']['hive.fetch.task.conversion.threshold']
-  #hive_conf_dict['hive.heapsize'] = params.config['configurations']['hive-env']['hive.heapsize']
-  hive_conf_dict['hive.limit.optimize.enable'] = str(params.config['configurations']['hive-site']['hive.limit.optimize.enable']).lower()
-  hive_conf_dict['hive.limit.pushdown.memory.usage'] = params.config['configurations']['hive-site']['hive.limit.pushdown.memory.usage']
-  hive_conf_dict['hive.metastore.authorization.storage.checks'] = str(params.config['configurations']['hive-site']['hive.metastore.authorization.storage.checks']).lower()
-  hive_conf_dict['hive.metastore.cache.pinobjtypes'] = params.config['configurations']['hive-site']['hive.metastore.cache.pinobjtypes']
-  # The following two parameters are hardcoded to workaround an issue
-  # interpreting the syntax for the property values (unit like s for seconds
-  # result in an error)
-  #hive_conf_dict['hive.metastore.client.connect.retry.delay'] = params.config['configurations']['hive-site']['hive.metastore.client.connect.retry.delay']
-  #hive_conf_dict['hive.metastore.client.socket.timeout'] = params.config['configurations']['hive-site']['hive.metastore.client.socket.timeout']
-  hive_conf_dict['hive.metastore.client.connect.retry.delay'] = '5'
-  hive_conf_dict['hive.metastore.client.socket.timeout'] = '1800'
-  hive_conf_dict['hive.metastore.connect.retries'] = params.config['configurations']['hive-site']['hive.metastore.connect.retries']
-  hive_conf_dict['hive.metastore.execute.setugi'] = str(params.config['configurations']['hive-site']['hive.metastore.execute.setugi']).lower()
-  hive_conf_dict['hive.metastore.failure.retries'] = params.config['configurations']['hive-site']['hive.metastore.failure.retries']
-  hive_conf_dict['hive.metastore.pre.event.listeners'] = params.config['configurations']['hive-site']['hive.metastore.pre.event.listeners']
-  hive_conf_dict['hive.metastore.sasl.enabled'] = str(params.config['configurations']['hive-site']['hive.metastore.sasl.enabled']).lower()
-  hive_conf_dict['hive.metastore.server.max.threads'] = params.config['configurations']['hive-site']['hive.metastore.server.max.threads']
-  hive_conf_dict['hive.metastore.warehouse.dir'] = params.config['configurations']['hive-site']['hive.metastore.warehouse.dir']
-  hive_conf_dict['hive.orc.compute.splits.num.threads'] = params.config['configurations']['hive-site']['hive.orc.compute.splits.num.threads']
-  hive_conf_dict['hive.orc.splits.include.file.footer'] = str(params.config['configurations']['hive-site']['hive.orc.splits.include.file.footer']).lower()
-  hive_conf_dict['hive.security.authenticator.manager'] = params.config['configurations']['hive-site']['hive.security.authenticator.manager']
-  hive_conf_dict['hive.security.metastore.authenticator.manager'] = params.config['configurations']['hive-site']['hive.security.metastore.authenticator.manager']
-  hive_conf_dict['hive.security.metastore.authorization.auth.reads'] = str(params.config['configurations']['hive-site']['hive.security.metastore.authorization.auth.reads']).lower()
-  hive_conf_dict['hive.security.metastore.authorization.manager'] = params.config['configurations']['hive-site']['hive.security.metastore.authorization.manager']
-  hive_conf_dict['hive.server2.allow.user.substitution'] = str(params.config['configurations']['hive-site']['hive.server2.allow.user.substitution']).lower()
-  hive_conf_dict['hive.server2.logging.operation.enabled'] = str(params.config['configurations']['hive-site']['hive.server2.logging.operation.enabled']).lower()
-  hive_conf_dict['hive.server2.logging.operation.log.location'] = '/tmp/spark/operation_logs'
-  hive_conf_dict['hive.server2.support.dynamic.service.discovery'] = 'false'
-  hive_conf_dict['hive.server2.table.type.mapping'] = params.config['configurations']['hive-site']['hive.server2.table.type.mapping']
-  hive_conf_dict['hive.server2.thrift.http.path'] = params.config['configurations']['hive-site']['hive.server2.thrift.http.path']
-  hive_conf_dict['hive.server2.thrift.max.worker.threads'] = params.config['configurations']['hive-site']['hive.server2.thrift.max.worker.threads']
-  hive_conf_dict['hive.server2.thrift.port'] = params.spark_thriftserver_port
-  hive_conf_dict['hive.server2.thrift.sasl.qop'] = params.config['configurations']['hive-site']['hive.server2.thrift.sasl.qop']
-  hive_conf_dict['hive.server2.transport.mode'] = params.config['configurations']['hive-site']['hive.server2.transport.mode']
-  hive_conf_dict['hive.server2.use.SSL'] = str(params.config['configurations']['hive-site']['hive.server2.use.SSL']).lower()
-  hive_conf_dict['hive.server2.zookeeper.namespace'] = params.config['configurations']['hive-site']['hive.server2.zookeeper.namespace']
-  hive_conf_dict['hive.smbjoin.cache.rows'] = params.config['configurations']['hive-site']['hive.smbjoin.cache.rows']
-  hive_conf_dict['hive.stats.autogather'] = str(params.config['configurations']['hive-site']['hive.stats.autogather']).lower()
-  hive_conf_dict['hive.stats.dbclass'] = params.config['configurations']['hive-site']['hive.stats.dbclass']
-  hive_conf_dict['hive.stats.fetch.column.stats'] = params.config['configurations']['hive-site']['hive.stats.fetch.column.stats']
-  hive_conf_dict['hive.stats.fetch.partition.stats'] = str(params.config['configurations']['hive-site']['hive.stats.fetch.partition.stats']).lower()
-  hive_conf_dict['hive.support.concurrency'] = str(params.config['configurations']['hive-site']['hive.support.concurrency']).lower()
-  hive_conf_dict['hive.txn.manager'] = params.config['configurations']['hive-site']['hive.txn.manager']
-  hive_conf_dict['hive.txn.max.open.batch'] = params.config['configurations']['hive-site']['hive.txn.max.open.batch']
-  hive_conf_dict['hive.txn.timeout'] = params.config['configurations']['hive-site']['hive.txn.timeout']
-  hive_conf_dict['hive.vectorized.execution.enabled'] = str(params.config['configurations']['hive-site']['hive.vectorized.execution.enabled']).lower()
-  hive_conf_dict['hive.vectorized.execution.reduce.enabled'] = str(params.config['configurations']['hive-site']['hive.vectorized.execution.reduce.enabled']).lower()
-  hive_conf_dict['hive.vectorized.groupby.checkinterval'] = params.config['configurations']['hive-site']['hive.vectorized.groupby.checkinterval']
-  hive_conf_dict['hive.vectorized.groupby.flush.percent'] = params.config['configurations']['hive-site']['hive.vectorized.groupby.flush.percent']
-  hive_conf_dict['hive.vectorized.groupby.maxentries'] = params.config['configurations']['hive-site']['hive.vectorized.groupby.maxentries']
-  hive_conf_dict['hive.zookeeper.client.port'] = params.config['configurations']['hive-site']['hive.zookeeper.client.port']
-  hive_conf_dict['hive.zookeeper.namespace'] = params.config['configurations']['hive-site']['hive.zookeeper.namespace']
-  hive_conf_dict['hive.zookeeper.quorum'] = params.config['configurations']['hive-site']['hive.zookeeper.quorum']
-  hive_conf_dict['javax.jdo.option.ConnectionDriverName'] = params.config['configurations']['hive-site']['javax.jdo.option.ConnectionDriverName']
-  hive_conf_dict['javax.jdo.option.ConnectionURL'] = params.config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
-  hive_conf_dict['javax.jdo.option.ConnectionUserName'] = params.config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
-
-# Comment out parameters not found in configurations dictionary
-# hive_conf_dict['hive.prewarm.enabled'] = params.config['configurations']['hive-site']['hive.prewarm.enabled']
-# hive_conf_dict['hive.prewarm.numcontainers'] = params.config['configurations']['hive-site']['hive.prewarm.numcontainers']
-#  hive_conf_dict['hive.user.install.directory'] = params.config['configurations']['hive-site']['hive.user.install.directory']
-
-  if params.security_enabled:
-    hive_conf_dict['hive.metastore.sasl.enabled'] =  str(params.config['configurations']['hive-site']['hive.metastore.sasl.enabled']).lower()
-    hive_conf_dict['hive.server2.authentication'] = params.config['configurations']['hive-site']['hive.server2.authentication']
-    hive_conf_dict['hive.metastore.kerberos.keytab.file'] = params.config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
-    hive_conf_dict['hive.metastore.kerberos.principal'] = params.config['configurations']['hive-site']['hive.metastore.kerberos.principal']
-    hive_conf_dict['hive.server2.authentication.spnego.principal'] =  params.config['configurations']['hive-site']['hive.server2.authentication.spnego.principal']
-    hive_conf_dict['hive.server2.authentication.spnego.keytab'] = params.config['configurations']['hive-site']['hive.server2.authentication.spnego.keytab']
-    hive_conf_dict['hive.server2.authentication.kerberos.principal'] = params.config['configurations']['hive-site']['hive.server2.authentication.kerberos.principal']
-    hive_conf_dict['hive.server2.authentication.kerberos.keytab'] =  params.config['configurations']['hive-site']['hive.server2.authentication.kerberos.keytab']
-    hive_conf_dict['hive.security.authorization.enabled']=  str(params.config['configurations']['hive-site']['hive.security.authorization.enabled']).lower()
-    hive_conf_dict['hive.security.authorization.manager'] = str(params.config['configurations']['hive-site']['hive.security.authorization.manager'])
-    hive_conf_dict['hive.server2.enable.doAs'] =  str(params.config['configurations']['hive-site']['hive.server2.enable.doAs']).lower()
-
-  if hive_conf_dict['hive.server2.use.SSL']:
-    if params.config['configurations']['hive-site']['hive.server2.keystore.path'] is not None:
-      hive_conf_dict['hive.server2.keystore.path']= str(params.config['configurations']['hive-site']['hive.server2.keystore.path'])
-    if params.config['configurations']['hive-site']['hive.server2.keystore.password'] is not None:
-      hive_conf_dict['hive.server2.keystore.password']= str(params.config['configurations']['hive-site']['hive.server2.keystore.password'])
-
-  # convert remaining numbers to strings
-  for key, value in hive_conf_dict.iteritems():
-    hive_conf_dict[key] = str(value)
-
-  return hive_conf_dict
-
-
-def spark_properties(params):
-  spark_dict = dict()
-
-  all_spark_config  = params.config['configurations']['spark-defaults']
-  #Add all configs unfiltered first to handle Custom case.
-  spark_dict = all_spark_config.copy()
-
-  spark_dict['spark.yarn.executor.memoryOverhead'] = params.spark_yarn_executor_memoryOverhead
-  spark_dict['spark.yarn.driver.memoryOverhead'] = params.spark_yarn_driver_memoryOverhead
-  spark_dict['spark.yarn.applicationMaster.waitTries'] = params.spark_yarn_applicationMaster_waitTries
-  spark_dict['spark.yarn.scheduler.heartbeat.interval-ms'] = params.spark_yarn_scheduler_heartbeat_interval
-  spark_dict['spark.yarn.max_executor.failures'] = params.spark_yarn_max_executor_failures
-  spark_dict['spark.yarn.queue'] = params.spark_yarn_queue
-  spark_dict['spark.yarn.containerLauncherMaxThreads'] = params.spark_yarn_containerLauncherMaxThreads
-  spark_dict['spark.yarn.submit.file.replication'] = params.spark_yarn_submit_file_replication
-  spark_dict['spark.yarn.preserve.staging.files'] = params.spark_yarn_preserve_staging_files
-
-  # Hardcoded paramaters to be added to spark-defaults.conf
-  spark_dict['spark.yarn.historyServer.address'] = params.spark_history_server_host + ':' + str(
-    params.spark_history_ui_port)
-  spark_dict['spark.history.ui.port'] = params.spark_history_ui_port
-  spark_dict['spark.eventLog.enabled'] = str(params.spark_eventlog_enabled).lower()
-  spark_dict['spark.eventLog.dir'] = params.spark_eventlog_dir
-  spark_dict['spark.history.fs.logDirectory'] = params.spark_eventlog_dir
-  spark_dict['spark.yarn.jar'] = params.spark_yarn_jar
-
-  spark_dict['spark.driver.extraJavaOptions'] = params.spark_driver_extraJavaOptions
-  spark_dict['spark.yarn.am.extraJavaOptions'] = params.spark_yarn_am_extraJavaOptions
-
-  # convert remaining numbers to strings
-  for key, value in spark_dict.iteritems():
-    spark_dict[key] = str(value)
-
-  return spark_dict
-
-
-def write_properties_to_file(file_path, value):
-  for key in value:
-    modify_config(file_path, key, value[key])
-
-
-def modify_config(filepath, variable, setting):
-  var_found = False
-  already_set = False
-  V = str(variable)
-  S = str(setting)
-
-  if ' ' in S:
-    S = '%s' % S
-
-  for line in fileinput.input(filepath, inplace=1):
-    if not line.lstrip(' ').startswith('#') and '=' in line:
-      _infile_var = str(line.split('=')[0].rstrip(' '))
-      _infile_set = str(line.split('=')[1].lstrip(' ').rstrip())
-      if var_found == False and _infile_var.rstrip(' ') == V:
-        var_found = True
-        if _infile_set.lstrip(' ') == S:
-          already_set = True
-        else:
-          line = "%s %s\n" % (V, S)
-
-    sys.stdout.write(line)
-
-  if not var_found:
-    with open(filepath, "a") as f:
-      f.write("%s \t %s\n" % (V, S))
-  elif already_set == True:
-    pass
-  else:
-    pass
-
-  return
-
-
-def create_file(file_path):
-  try:
-    file = open(file_path, 'w')
-    file.close()
-  except:
-    print('Unable to create file: ' + file_path)
-    sys.exit(0)
-
-def configFile(name, template_name=None):
-  import params
-
-  File(format("{spark_conf}/{name}"),
-       content=Template(template_name),
-       owner=params.spark_user,
-       group=params.user_group,
-       mode=0644
-  )
-
-def get_iop_version():
-  try:
-    command = 'iop-select status hadoop-client'
-    return_code, iop_output = shell.call(command, timeout=20)
-  except Exception, e:
-    Logger.error(str(e))
-    raise Fail('Unable to execute iop-select command to retrieve the version.')
-
-  if return_code != 0:
-    raise Fail(
-      'Unable to determine the current version because of a non-zero return code of {0}'.format(str(return_code)))
-
-  iop_version = re.sub('hadoop-client - ', '', iop_output)
-  match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+', iop_version)
-
-  if match is None:
-    raise Fail('Failed to get extracted version')
-
-  return iop_version
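For reference, modify_config above rewrites a properties file in place through fileinput.input(..., inplace=1), which redirects stdout back into the file being iterated, and appends the property when it was never seen. A simplified sketch of that in-place edit pattern for a whitespace-separated properties file such as spark-defaults.conf (the file name and property below are illustrative):

    import fileinput
    import sys

    def set_property(filepath, key, value):
        found = False
        # With inplace=1, whatever is written to stdout becomes the new file content.
        for line in fileinput.input(filepath, inplace=1):
            if not line.lstrip().startswith('#') and line.split() and line.split()[0] == key:
                found = True
                line = "%s %s\n" % (key, value)
            sys.stdout.write(line)
        if not found:
            with open(filepath, "a") as f:
                f.write("%s %s\n" % (key, value))

    # set_property("/etc/spark/conf/spark-defaults.conf", "spark.eventLog.enabled", "true")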

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_check.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_check.py
deleted file mode 100644
index 0029591..0000000
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_check.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-import socket
-import params
-
-from resource_management.core.exceptions import Fail
-from resource_management.core.resources import Execute
-from resource_management.libraries.functions import format
-
-from resource_management.core import Logger
-
-
-def check_thrift_port_sasl(address, port, hive_auth="NOSASL", key=None, kinitcmd=None, smokeuser='ambari-qa',
-                           transport_mode="binary", http_endpoint="cliservice", ssl=False, ssl_keystore=None,
-                           ssl_password=None):
-    """
-    Hive thrift SASL port check
-    """
-
-    # cast params to the expected types if they were passed in as strings
-    if isinstance(port, str):
-        port = int(port)
-
-    if isinstance(ssl, str):
-        if str(ssl).lower() == str("true"):
-            ssl = bool(True)
-        else:
-            ssl = bool(False)
-
-    # to pass as beeline argument
-    ssl_str = str(ssl).lower()
-    beeline_check_timeout = 60
-    beeline_url = ['jdbc:hive2://{address}:{port}/', "transportMode={transport_mode}"]
-
-    # append url according to used transport
-    if transport_mode == "http":
-        beeline_url.append('httpPath={http_endpoint}')
-
-    # append url according to used auth
-    if hive_auth == "NOSASL":
-        beeline_url.append('auth=noSasl')
-
-    # append url according to ssl configuration
-    if ssl and ssl_keystore is not None and ssl_password is not None:
-        beeline_url.extend(['ssl={ssl_str}', 'sslTrustStore={ssl_keystore}', 'trustStorePassword={ssl_password!p}'])
-
-    # append url according to kerberos setting
-    if kinitcmd:
-        beeline_url.append('principal={key}')
-        Execute(kinitcmd, user=smokeuser)
-
-    cmd = "! beeline -u '%s' -e '' 2>&1| awk '{print}'|grep -i -e 'Connection refused' -e 'Invalid URL' -e 'Error'" % \
-          format(";".join(beeline_url))
-    Execute(cmd,
-            user=smokeuser,
-            path=["/usr/iop/"+params.iop_full_version+"/spark/bin"],
-            timeout=beeline_check_timeout
-            )

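The Thrift check above is essentially string assembly plus a beeline probe. The fragment below reproduces only the URL assembly with made-up host, port and transport values; actually running the resulting beeline command still requires a live Spark Thrift/HiveServer2 endpoint.

    # Illustration of the JDBC URL the check builds; all values are examples.
    address = "thrift-host.example.com"
    port = 10015
    transport_mode = "http"
    hive_auth = "NOSASL"

    parts = ["jdbc:hive2://%s:%d/" % (address, port),
             "transportMode=%s" % transport_mode]
    if transport_mode == "http":
        parts.append("httpPath=cliservice")
    if hive_auth == "NOSASL":
        parts.append("auth=noSasl")

    beeline_url = ";".join(parts)
    # The smoke test then fails if beeline prints a connection error:
    cmd = "! beeline -u '%s' -e '' 2>&1 | grep -i -e 'Connection refused' -e 'Invalid URL' -e 'Error'" % beeline_url
    print(cmd)
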
http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py
deleted file mode 100644
index 5c2dd74..0000000
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/spark_client.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import iop_select
-#from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
-from resource_management.core.exceptions import ComponentIsNotRunning
-from resource_management.core.logger import Logger
-from resource_management.core import shell
-from spark import spark
-
-
-class SparkClient(Script):
-
-  def get_stack_to_component(self):
-    return {"BigInsights":"spark-client"};
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "spark", params.version)
-      iop_select.select("spark-client", params.version)
-      #Execute(format("iop-select set spark-client {version}"))
-
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-
-    env.set_params(params)
-    spark(env)
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-
-if __name__ == "__main__":
-  SparkClient().execute()
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/status_params.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/status_params.py
deleted file mode 100644
index a666994..0000000
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/status_params.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-config = Script.get_config()
-
-spark_user = config['configurations']['spark-env']['spark_user']
-spark_group = config['configurations']['spark-env']['spark_group']
-user_group = config['configurations']['cluster-env']['user_group']
-
-if 'hive-env' in config['configurations']:
-    hive_user = config['configurations']['hive-env']['hive_user']
-else:
-    hive_user = "hive"
-
-spark_pid_dir = config['configurations']['spark-env']['spark_pid_dir']
-spark_history_server_pid_file = format("{spark_pid_dir}/spark-{spark_user}-org.apache.spark.deploy.history.HistoryServer-1.pid")
-spark_thrift_server_pid_file = format("{spark_pid_dir}/spark-{hive_user}-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid")

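The two pid-file paths are produced by resource_management's format() from the variables defined above; with hypothetical values (plain str.format() used as a stand-in) they expand as follows.

    # Stand-in for resource_management's format(); the values are examples.
    spark_pid_dir = "/var/run/spark"
    spark_user = "spark"
    hive_user = "hive"

    history_pid = "{0}/spark-{1}-org.apache.spark.deploy.history.HistoryServer-1.pid".format(spark_pid_dir, spark_user)
    thrift_pid = "{0}/spark-{1}-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid".format(spark_pid_dir, hive_user)

    print(history_pid)  # /var/run/spark/spark-spark-org.apache.spark.deploy.history.HistoryServer-1.pid
    print(thrift_pid)   # /var/run/spark/spark-hive-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid
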
http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/thrift_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/thrift_server.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/thrift_server.py
deleted file mode 100644
index 20002f8..0000000
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/scripts/thrift_server.py
+++ /dev/null
@@ -1,119 +0,0 @@
-#!/usr/bin/python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-import socket
-import os
-from resource_management import *
-from resource_management.libraries.functions import iop_select
-from resource_management.libraries.functions import conf_select
-from resource_management.core.exceptions import ComponentIsNotRunning
-from resource_management.core.logger import Logger
-from resource_management.core import shell
-from resource_management.libraries.functions import Direction
-from spark import *
-
-
-class ThriftServer(Script):
-
-  def get_stack_to_component(self):
-    return {"BigInsights":"spark-thriftserver"};
-  
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '4.0.0.0') >= 0:
-      conf_select.select(params.stack_name, "spark", params.version)
-      iop_select.select("spark-thriftserver", params.version)
-
-  def install(self, env):
-    self.install_packages(env)
-    import params
-    env.set_params(params)
-    self.configure(env)
-
-  def stop(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    self.configure(env)
-    daemon_cmd = format('{spark_thrift_server_stop}')
-    Execute(daemon_cmd,
-            user=params.hive_user,
-            environment={'JAVA_HOME': params.java_home}
-    )
-    if os.path.isfile(params.spark_thrift_server_pid_file):
-      os.remove(params.spark_thrift_server_pid_file)
-
-
-  def start(self, env, upgrade_type=None):
-    import params
-
-    env.set_params(params)
-    # TODO this looks wrong, maybe just call spark(env)
-    self.configure(env)
-
-    if params.security_enabled:
-        hive_kerberos_keytab = params.config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
-        hive_principal = params.config['configurations']['hive-site']['hive.metastore.kerberos.principal'].replace('_HOST', socket.getfqdn().lower())
-        hive_kinit_cmd = format("{kinit_path_local} -kt {hive_kerberos_keytab} {hive_principal}; ")
-        Execute(hive_kinit_cmd, user=params.hive_user)
-
-    # FIXME! TODO! remove this after soft link bug is fixed:
-    #if not os.path.islink('/usr/iop/current/spark'):
-    #  iop_version = get_iop_version()
-    #  cmd = 'ln -s /usr/iop/' + iop_version + '/spark /usr/iop/current/spark'
-    #  Execute(cmd)
-
-    daemon_cmd = format('{spark_thrift_server_start} --conf spark.ui.port={params.spark_thriftserver_ui_port}')
-    no_op_test = format(
-      'ls {spark_thrift_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_thrift_server_pid_file}` >/dev/null 2>&1')
-    if upgrade_type is not None and params.upgrade_direction == Direction.DOWNGRADE and not params.security_enabled:
-      Execute(daemon_cmd,
-              user=params.spark_user,
-              environment={'JAVA_HOME': params.java_home},
-              not_if=no_op_test
-              )
-    else:
-      Execute(daemon_cmd,
-              user=params.hive_user,
-              environment={'JAVA_HOME': params.java_home},
-              not_if=no_op_test
-      )
-
-  def status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    pid_file = format("{spark_thrift_server_pid_file}")
-    # Check the Spark Thrift Server pid file
-    check_process_status(pid_file)
-
-  # Note: configure() is also invoked from install() and start() above
-  def configure(self, env):
-    import params
-
-    env.set_params(params)
-    spark(env)
-
-if __name__ == "__main__":
-  ThriftServer().execute()

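start() only launches the daemon when the not_if shell test (pid file exists and the process is alive) fails. A rough Python equivalent of that liveness check is sketched below; the pid-file path is a placeholder taken from status_params.py above.

    import errno
    import os

    def process_running(pid_file):
        # Same idea as `ls pidfile >/dev/null && ps -p $(cat pidfile) >/dev/null`.
        try:
            with open(pid_file) as f:
                pid = int(f.read().strip())
        except (IOError, ValueError):
            return False               # no pid file, or it does not contain a pid
        try:
            os.kill(pid, 0)            # signal 0 only probes for process existence
            return True
        except OSError as e:
            return e.errno == errno.EPERM   # process exists but belongs to another user

    pid_file = "/var/run/spark/spark-hive-org.apache.spark.sql.hive.thriftserver.HiveThriftServer2-1.pid"
    if not process_running(pid_file):
        print("thrift server is not running; the daemon command would be executed here")
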
http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/templates/spark-defaults.conf.j2
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/templates/spark-defaults.conf.j2
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/templates/spark-defaults.conf.j2
deleted file mode 100644
index 83c971d..0000000
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SPARK/package/templates/spark-defaults.conf.j2
+++ /dev/null
@@ -1,43 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-#
-
-{% for key, value in spark_conf_properties_map.iteritems() -%}
-  {{key}}       {{value}}
-{% endfor %}

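The template body is just a loop over spark_conf_properties_map, i.e. the spark_dict assembled in the deleted spark.py above. Rendered standalone with jinja2 and a couple of example properties (items() is used here so the snippet also runs on Python 3), it produces the whitespace-separated layout spark-defaults.conf expects:

    # Standalone rendering of the same loop; the property values are examples.
    from jinja2 import Template

    template = Template("{% for key, value in props.items() %}{{ key }}    {{ value }}\n{% endfor %}")
    props = {
        "spark.eventLog.enabled": "true",
        "spark.history.ui.port": "18080",
    }
    print(template.render(props=props))
    # spark.eventLog.enabled    true
    # spark.history.ui.port    18080
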
http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/configuration/sqoop-env.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/configuration/sqoop-env.xml
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/configuration/sqoop-env.xml
deleted file mode 100644
index 82fa9d0..0000000
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/configuration/sqoop-env.xml
+++ /dev/null
@@ -1,59 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <!-- sqoop-env.sh -->
-  <property>
-    <name>content</name>
-    <description>This is the jinja template for the sqoop-env.sh file</description>
-    <value>
-# Set Hadoop-specific environment variables here.
-
-#Set path to where bin/hadoop is available
-export HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
-
-#set the path to where bin/hbase is available
-export HBASE_HOME=${HBASE_HOME:-{{hbase_home}}}
-
-#Set the path to where bin/hive is available
-export HIVE_HOME=${HIVE_HOME:-{{hive_home}}}
-
-#Set the path to where the zookeeper config dir is
-export ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}
-
-# add libthrift in hive to sqoop class path first so hive imports work
-export SQOOP_USER_CLASSPATH="`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}"
-    </value>
-  </property>
-  <property>
-    <name>sqoop_user</name>
-    <description>User to run Sqoop as</description>
-    <property-type>USER</property-type>
-    <value>sqoop</value>
-  </property>
-  <property>
-    <name>jdbc_drivers</name>
-    <description>Comma-separated list of additional JDBC driver class names</description>
-    <value> </value>
-  </property>  
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/metainfo.xml
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/metainfo.xml
deleted file mode 100644
index 2be43be..0000000
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/metainfo.xml
+++ /dev/null
@@ -1,95 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>SQOOP</name>
-      <displayName>Sqoop</displayName>
-      <comment>Tool for transferring bulk data between Apache Hadoop and
-        structured data stores such as relational databases
-      </comment>
-      <version>1.4.6</version>
-
-      <components>
-        <component>
-          <name>SQOOP</name>
-          <displayName>Sqoop</displayName>
-          <category>CLIENT</category>
-          <cardinality>1+</cardinality>
-          <versionAdvertised>true</versionAdvertised>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/sqoop_client.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>env</type>
-              <fileName>sqoop-env.sh</fileName>
-              <dictionaryName>sqoop-env</dictionaryName>
-            </configFile>
-          </configFiles>
-        </component>
-      </components>
-      
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>sqoop_4_2_*</name>
-            </package>
-            <package>
-              <name>mysql-connector-java</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>HDFS</service>
-      </requiredServices>
-      
-      <configuration-dependencies>
-        <config-type>sqoop-env</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/__init__.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/__init__.py
deleted file mode 100644
index 5561e10..0000000
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""

http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/params.py
deleted file mode 100644
index 32aec76..0000000
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/params.py
+++ /dev/null
@@ -1,95 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management.libraries.functions.default import default
-from resource_management import *
-
-# a map of the Ambari role to the component name
-# for use with /usr/iop/current/<component>
-SERVER_ROLE_DIRECTORY_MAP = {
-  'SQOOP' : 'sqoop-client'
-}
-
-component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "SQOOP")
-
-config = Script.get_config()
-ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
-
-stack_name = default("/hostLevelParams/stack_name", None)
-
-stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-stack_version = format_hdp_stack_version(stack_version_unformatted)
-
-# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
-version = default("/commandParams/version", None)
-
-#hadoop params
-sqoop_conf_dir = "/usr/iop/current/sqoop-client/conf"
-sqoop_lib = '/usr/iop/current/sqoop-client/lib'
-hadoop_home = '/usr/iop/current/hadoop-client'
-hbase_home = '/usr/iop/current/hbase-client'
-hive_home = '/usr/iop/current/hive-client'
-sqoop_bin_dir = '/usr/iop/current/sqoop-client/bin/'
-
-zoo_conf_dir = "/usr/iop/current/zookeeper-client/conf"
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
-user_group = config['configurations']['cluster-env']['user_group']
-sqoop_env_sh_template = config['configurations']['sqoop-env']['content']
-
-sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
-
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path()
-#JDBC driver jar name
-sqoop_jdbc_drivers_dict = {}
-sqoop_jdbc_drivers_name_dict = {}
-if "jdbc_drivers" in config['configurations']['sqoop-env']:
-  sqoop_jdbc_drivers = config['configurations']['sqoop-env']['jdbc_drivers'].split(',')
-
-  for driver_name in sqoop_jdbc_drivers:
-    driver_name = driver_name.strip()
-    if driver_name and not driver_name == '':
-      if driver_name == "com.microsoft.sqlserver.jdbc.SQLServerDriver":
-        jdbc_jar_name = "sqljdbc4.jar"
-        jdbc_symlink_name = "mssql-jdbc-driver.jar"
-        jdbc_driver_name = "mssql"
-      elif driver_name == "com.mysql.jdbc.Driver":
-        jdbc_jar_name = "mysql-connector-java.jar"
-        jdbc_symlink_name = "mysql-jdbc-driver.jar"
-        jdbc_driver_name = "mysql"
-      elif driver_name == "org.postgresql.Driver":
-        jdbc_jar_name = "postgresql-jdbc.jar"
-        jdbc_symlink_name = "postgres-jdbc-driver.jar"
-        jdbc_driver_name = "postgres"
-      elif driver_name == "oracle.jdbc.driver.OracleDriver":
-        jdbc_jar_name = "ojdbc.jar"
-        jdbc_symlink_name = "oracle-jdbc-driver.jar"
-        jdbc_driver_name = "oracle"
-      elif driver_name == "org.hsqldb.jdbc.JDBCDriver":
-        jdbc_jar_name = "hsqldb.jar"
-        jdbc_symlink_name = "hsqldb-jdbc-driver.jar"
-        jdbc_driver_name = "hsqldb"
-    else:
-      continue
-    sqoop_jdbc_drivers_dict[jdbc_jar_name] = jdbc_symlink_name
-    sqoop_jdbc_drivers_name_dict[jdbc_jar_name] = jdbc_driver_name
-jdk_location = config['hostLevelParams']['jdk_location']

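The per-driver branches above amount to a lookup from JDBC driver class name to (jar, symlink, short name). The sketch below expresses the same mapping as a table for a hypothetical jdbc_drivers setting; inside Ambari the input string comes from sqoop-env.

    # Same driver-class -> (jar, symlink, short name) mapping as in params.py.
    JDBC_DRIVER_INFO = {
        "com.microsoft.sqlserver.jdbc.SQLServerDriver": ("sqljdbc4.jar", "mssql-jdbc-driver.jar", "mssql"),
        "com.mysql.jdbc.Driver": ("mysql-connector-java.jar", "mysql-jdbc-driver.jar", "mysql"),
        "org.postgresql.Driver": ("postgresql-jdbc.jar", "postgres-jdbc-driver.jar", "postgres"),
        "oracle.jdbc.driver.OracleDriver": ("ojdbc.jar", "oracle-jdbc-driver.jar", "oracle"),
        "org.hsqldb.jdbc.JDBCDriver": ("hsqldb.jar", "hsqldb-jdbc-driver.jar", "hsqldb"),
    }

    jdbc_drivers = "com.mysql.jdbc.Driver, org.postgresql.Driver"   # example operator input
    sqoop_jdbc_drivers_dict = {}
    sqoop_jdbc_drivers_name_dict = {}
    for class_name in (d.strip() for d in jdbc_drivers.split(',')):
        if class_name in JDBC_DRIVER_INFO:
            jar, symlink, short_name = JDBC_DRIVER_INFO[class_name]
            sqoop_jdbc_drivers_dict[jar] = symlink
            sqoop_jdbc_drivers_name_dict[jar] = short_name

    print(sqoop_jdbc_drivers_dict)   # {'mysql-connector-java.jar': 'mysql-jdbc-driver.jar', ...}
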
http://git-wip-us.apache.org/repos/asf/ambari/blob/44e21f8e/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/service_check.py
deleted file mode 100644
index 427f260..0000000
--- 
a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/SQOOP/package/scripts/service_check.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-
-from resource_management import *
-
-
-class SqoopServiceCheck(Script):
-
-  def get_stack_to_component(self):
-    return {"BigInsights": "sqoop-server"}
-
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-    if params.security_enabled:
-      Execute(format("{kinit_path_local}  -kt {smoke_user_keytab} {smokeuser_principal}"),
-              user = params.smokeuser,
-      )
-    Execute("sqoop version",
-            user = params.smokeuser,
-            path = params.sqoop_bin_dir,
-            logoutput = True
-    )
-
-if __name__ == "__main__":
-  SqoopServiceCheck().execute()

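Stripped of the resource_management wrappers, the Sqoop smoke test is an optional kinit followed by `sqoop version`. A rough subprocess-based equivalent is sketched below; the keytab, principal and binary path are placeholders, and the switch to the smoke user is omitted.

    import subprocess

    security_enabled = False   # example flag; Ambari reads it from cluster-env

    if security_enabled:
        # Placeholder keytab/principal; Ambari substitutes the real smoke-user values.
        subprocess.check_call(["kinit", "-kt",
                               "/etc/security/keytabs/smokeuser.headless.keytab",
                               "ambari-qa@EXAMPLE.COM"])

    # Equivalent of Execute("sqoop version", path=sqoop_bin_dir, logoutput=True).
    output = subprocess.check_output(["/usr/iop/current/sqoop-client/bin/sqoop", "version"],
                                     stderr=subprocess.STDOUT)
    print(output)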