http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/webhcat_service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/webhcat_service_check.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/webhcat_service_check.py
new file mode 100644
index 0000000..8d15e47
--- /dev/null
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/scripts/webhcat_service_check.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+def webhcat_service_check():
+  import params
+  File(format("{tmp_dir}/templetonSmoke.sh"),
+       content= StaticFile('templetonSmoke.sh'),
+       mode=0755
+  )
+
+  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} 
{smokeuser} {smokeuser_keytab}"
+               " {security_param} {kinit_path_local}",
+               smokeuser_keytab=params.smoke_user_keytab if 
params.security_enabled else "no_keytab")
+
+  Execute(cmd,
+          tries=3,
+          try_sleep=5,
+          path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+          logoutput=True)
+
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/hcat-env.sh.j2
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/hcat-env.sh.j2
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/hcat-env.sh.j2
deleted file mode 100644
index 0b9dcc3..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/hcat-env.sh.j2
+++ /dev/null
@@ -1,43 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-JAVA_HOME={{java64_home}}
-HCAT_PID_DIR={{hcat_pid_dir}}/
-HCAT_LOG_DIR={{hcat_log_dir}}/
-HCAT_CONF_DIR={{hcat_conf_dir}}
-HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
-#DBROOT is the path where the connector jars are downloaded
-DBROOT={{hcat_dbroot}}
-USER={{hcat_user}}
-METASTORE_PORT={{hive_metastore_port}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/startHiveserver2.sh.j2
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/startHiveserver2.sh.j2
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/startHiveserver2.sh.j2
index a8fe21c..3ddf50f 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/startHiveserver2.sh.j2
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/HIVE/package/templates/startHiveserver2.sh.j2
@@ -25,5 +25,5 @@ HIVE_SERVER2_OPTS=" -hiveconf hive.log.file=hiveserver2.log 
-hiveconf hive.log.d
 HIVE_SERVER2_OPTS="${HIVE_SERVER2_OPTS} -hiveconf 
hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator
 -hiveconf 
hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory
 "
 {% endif %}
 
-HIVE_CONF_DIR=$4 /usr/lib/hive/bin/hiveserver2 -hiveconf hive.metastore.uris=" 
" ${HIVE_SERVER2_OPTS} > $1 2> $2 &
+HIVE_CONF_DIR=$4 {{hive_bin}}/hiveserver2 -hiveconf hive.metastore.uris=" " 
${HIVE_SERVER2_OPTS} > $1 2> $2 &
 echo $!|cat>$3

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/metainfo.xml
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/metainfo.xml
deleted file mode 100644
index 058f047..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/metainfo.xml
+++ /dev/null
@@ -1,66 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>MAHOUT</name>
-      <displayName>Mahout</displayName>
-      <comment>The Apache Mahout project's goal is to build a scalable machine 
learning library</comment>
-      <version>0.9.666</version>
-      <components>
-        <component>
-          <name>MAHOUT</name>
-          <displayName>Mahout Client</displayName>
-          <category>CLIENT</category>
-          <cardinality>0+</cardinality>
-          <commandScript>
-            <script>scripts/mahout_client.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>mahout</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>600</timeout>
-      </commandScript>
-
-      <requiredServices>
-        <service>YARN</service>
-      </requiredServices>
-
-      <configuration-dependencies>
-        <config-type>global</config-type>
-      </configuration-dependencies>
-
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/mahout.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/mahout.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/mahout.py
deleted file mode 100644
index 3984c89..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/mahout.py
+++ /dev/null
@@ -1,66 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import os
-
-from resource_management import *
-
-def mahout():
-  import params
-
-  Directory( params.mahout_conf_dir,
-    owner = params.hdfs_user,
-    group = params.user_group
-  )
-
-  mahout_TemplateConfig( ['mahout-env.sh'])
-
-  # mahout_properties is always set to a default even if it's not in the 
payload
-  File(format("{mahout_conf_dir}/mahout.properties"),
-              mode=0644,
-              group=params.user_group,
-              owner=params.hdfs_user,
-              content=params.mahout_properties
-  )
-
-  if params.log4j_props:
-    File(format("{mahout_conf_dir}/log4j.properties"),
-      mode=0644,
-      group=params.user_group,
-      owner=params.hdfs_user,
-      content=params.log4j_props
-    )
-  elif (os.path.exists(format("{mahout_conf_dir}/log4j.properties"))):
-    File(format("{mahout_conf_dir}/log4j.properties"),
-      mode=0644,
-      group=params.user_group,
-      owner=params.hdfs_user
-    )
-
-def mahout_TemplateConfig(name):
-  import params
-
-  if not isinstance(name, list):
-    name = [name]
-
-  for x in name:
-    TemplateConfig( format("{mahout_conf_dir}/{x}"),
-        owner = params.hdfs_user
-    )

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/mahout_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/mahout_client.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/mahout_client.py
deleted file mode 100644
index 8457bfc..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/mahout_client.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-Ambari Agent
-"""
-import sys
-from resource_management import *
-from mahout import mahout
-
-class MahoutClient(Script):
-
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    mahout()
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-if __name__ == "__main__":
-  MahoutClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/params.py
deleted file mode 100644
index d6cd33f..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/params.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management import *
-
-# server configurations
-config = Script.get_config()
-
-hadoop_log_dir = "/var/log/hadoop"
-mahout_conf_dir = "/etc/mahout/conf"
-hadoop_conf_dir = "/etc/hadoop/conf"
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hdfs_principal_name = 
config['configurations']['hadoop-env']['hdfs_principal_name']
-
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-user_group = config['configurations']['cluster-env']['user_group']
-_authentication = 
config['configurations']['core-site']['hadoop.security.authentication']
-security_enabled = ( not is_empty(_authentication) and _authentication == 
'kerberos')
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), 
"/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-
-# not supporting 32 bit jdk.
-java64_home = config['hostLevelParams']['java_home']
-hadoop_home = "/usr/lib/hadoop/"
-
-# mahout.properties - if not in the JSON command, then we need to ensure some 
-# basic properties are set; this is a safety mechanism
-if (('mahout-properties' in config['configurations']) and ('mahout-content' in 
config['configurations']['mahout-properties'])):
-  mahout_properties = 
config['configurations']['mahout-properties']['mahout-content']
-else:
-  mahout_properties = "mahout.location.check.strict=false"
-
-# log4j.properties
-if (('mahout-log4j' in config['configurations']) and ('content' in 
config['configurations']['mahout-log4j'])):
-  log4j_props = config['configurations']['mahout-log4j']['content']
-else:
-  log4j_props = None

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/service_check.py
deleted file mode 100644
index bb2ec4d..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/scripts/service_check.py
+++ /dev/null
@@ -1,92 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-import os
-from resource_management import *
-
-class MahoutServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-
-    # prepare the input file content
-    input_file = os.path.join("/tmp", "mahout_input.csv")
-    input_file_content = """\
-1,101,5.0
-1,102,5.0
-1,103,2.5
-2,101,2.0
-2,102,2.5
-2,103,5.0
-2,104,2.0
-3,101,2.5
-3,104,4.0
-3,105,4.5
-3,107,5.0"""
-
-    File(input_file, content=input_file_content, mode=644)
-
-    # create the log dir for the smoke user
-    Directory(os.path.join(params.hadoop_log_dir, params.smokeuser),
-                   owner = params.smokeuser,
-              group = params.user_group,
-              mode = 755)
-
-    # transfer the input file to hdfs
-    recommenderdata_dir = "recommenderdata"
-    recommenderoutput_dir = "recommenderoutput"
-    cleanup_cmd = format("fs -rm -r {recommenderdata_dir} 
{recommenderoutput_dir} temp")
-    #cleanup put below to handle retries; if retrying there will be a stale 
file that needs cleanup; exit code is fn of second command
-    create_file_cmd = format("{cleanup_cmd}; hadoop fs -put {input_file} 
{recommenderdata_dir}") #TODO: inconsistent that second command needs hadoop
-
-    test_cmd_cat = "mahout cat /etc/passwd"
-    test_cmd_recommendation = format("mahout recommenditembased --input 
{recommenderdata_dir} --output {recommenderoutput_dir} -s 
SIMILARITY_COOCCURRENCE")
-
-    ExecuteHadoop(create_file_cmd,
-      tries     = 3,
-      try_sleep = 5,
-      user      = params.smokeuser,
-      conf_dir = params.hadoop_conf_dir,
-      # for kinit run
-      keytab = params.smoke_user_keytab,
-      security_enabled = params.security_enabled,
-      kinit_path_local = params.kinit_path_local,
-      logoutput = True
-    )
-
-    Execute(test_cmd_cat,
-      tries     = 3,
-      try_sleep = 5,
-      path      = '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
-      user      = params.smokeuser,
-      logoutput = True
-    )
-
-    Execute(test_cmd_recommendation,
-      tries     = 3,
-      try_sleep = 5,
-      path      = '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
-      user      = params.smokeuser,
-      logoutput = True,
-    )
-
-if __name__ == "__main__":
-  MahoutServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/templates/mahout-env.sh.j2
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/templates/mahout-env.sh.j2
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/templates/mahout-env.sh.j2
deleted file mode 100644
index 3f97f10..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/MAHOUT/package/templates/mahout-env.sh.j2
+++ /dev/null
@@ -1,34 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-JAVA_HOME={{java64_home}}
-HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/configuration/oozie-env.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/configuration/oozie-env.xml
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/configuration/oozie-env.xml
index fc47a70..4f8c93a 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/configuration/oozie-env.xml
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/configuration/oozie-env.xml
@@ -67,7 +67,7 @@
 
 if [ -d "/usr/lib/bigtop-tomcat" ]; then
   export OOZIE_CONFIG=${OOZIE_CONFIG:-/etc/oozie/conf}
-  export CATALINA_BASE=${CATALINA_BASE:-/var/lib/oozie/oozie-server}
+  export CATALINA_BASE=${CATALINA_BASE:-{{oozie_server_dir}}}
   export CATALINA_TMPDIR=${CATALINA_TMPDIR:-/var/tmp/oozie}
   export OOZIE_CATALINA_HOME=/usr/lib/bigtop-tomcat
 fi
@@ -122,7 +122,7 @@ export OOZIE_ADMIN_PORT={{oozie_server_admin_port}}
 # The base URL for callback URLs to Oozie
 #
 # export 
OOZIE_BASE_URL="http://${OOZIE_HTTP_HOSTNAME}:${OOZIE_HTTP_PORT}/oozie";
-export JAVA_LIBRARY_PATH=/usr/lib/hadoop/lib/native/Linux-amd64-64
+export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-amd64-64
     </value>
   </property>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/configuration/oozie-log4j.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/configuration/oozie-log4j.xml
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/configuration/oozie-log4j.xml
index d885c66..7f7158f 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/configuration/oozie-log4j.xml
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/configuration/oozie-log4j.xml
@@ -52,7 +52,7 @@ log4j.appender.oozie.DatePattern='.'yyyy-MM-dd-HH
 log4j.appender.oozie.File=${oozie.log.dir}/oozie.log
 log4j.appender.oozie.Append=true
 log4j.appender.oozie.layout=org.apache.log4j.PatternLayout
-log4j.appender.oozie.layout.ConversionPattern=%d{ISO8601} %5p %c{1}:%L - %m%n
+log4j.appender.oozie.layout.ConversionPattern=%d{ISO8601} %5p %c{1}:%L - 
SERVER[${oozie.instance.id}] %m%n
 
 log4j.appender.oozieops=org.apache.log4j.DailyRollingFileAppender
 log4j.appender.oozieops.DatePattern='.'yyyy-MM-dd

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/metainfo.xml
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/metainfo.xml
index 9d4247e..2d6f93f 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/metainfo.xml
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/metainfo.xml
@@ -23,7 +23,7 @@
       <displayName>Oozie</displayName>
       <comment>System for workflow coordination and execution of Apache Hadoop 
jobs.  This also includes the installation of the optional Oozie Web Console 
which relies on and will install the &lt;a target="_blank" 
href="http://www.sencha.com/legal/open-source-faq/"&gt;ExtJS&lt;/a&gt; Library.
       </comment>
-      <version>4.0.0.2.0</version>
+      <version>4.0.1.691</version>
       <components>
         <component>
           <name>OOZIE_SERVER</name>
@@ -121,22 +121,10 @@
           </packages>
         </osSpecific>
         
-       <osSpecific>
-          <osFamily>redhat5,redhat6,suse11</osFamily>
-          <packages>
-            <package>
-              <name>extjs-2.2-1</name>
-            </package>
-          </packages>
-        </osSpecific>
-        
         <osSpecific>
           <osFamily>ubuntu12</osFamily>
           <packages>
             <package>
-              <name>extjs</name>
-            </package>
-            <package>
               <name>libxml2-utils</name>
             </package>
           </packages>

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/files/oozieSmoke2.sh
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/files/oozieSmoke2.sh
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/files/oozieSmoke2.sh
index 0a80d0f..8edcd7e 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/files/oozieSmoke2.sh
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/files/oozieSmoke2.sh
@@ -35,10 +35,10 @@ function checkOozieJobStatus {
   num_of_tries=${num_of_tries:-10}
   local i=0
   local rc=1
-  local cmd="source ${oozie_conf_dir}/oozie-env.sh ; /usr/bin/oozie job -oozie 
${OOZIE_SERVER} -info $job_id"
-  su - ${smoke_test_user} -c "$cmd"
+  local cmd="source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie 
job -oozie ${OOZIE_SERVER} -info $job_id"
+  su -s /bin/bash - ${smoke_test_user} -c "$cmd"
   while [ $i -lt $num_of_tries ] ; do
-    cmd_output=`su - ${smoke_test_user} -c "$cmd"`
+    cmd_output=`su -s /bin/bash - ${smoke_test_user} -c "$cmd"`
     (IFS='';echo $cmd_output)
     act_status=$(IFS='';echo $cmd_output | grep ^Status | cut -d':' -f2 | sed 
's| ||g')
     echo "workflow_status=$act_status"
@@ -58,11 +58,13 @@ function checkOozieJobStatus {
 }
 
 export oozie_conf_dir=$1
-export hadoop_conf_dir=$2
-export smoke_test_user=$3
-export security_enabled=$4
-export smoke_user_keytab=$5
-export kinit_path_local=$6
+export oozie_bin_dir=$2
+export hadoop_conf_dir=$3
+export hadoop_bin_dir=$4
+export smoke_test_user=$5
+export security_enabled=$6
+export smoke_user_keytab=$7
+export kinit_path_local=$8
 
 export OOZIE_EXIT_CODE=0
 export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml 
yarn.resourcemanager.address`
@@ -77,6 +79,9 @@ fi
   
 
 export OOZIE_EXAMPLES_DIR=`$LIST_PACKAGE_FILES_CMD oozie-client | grep 
'oozie-examples.tar.gz$' | xargs dirname`
+if [[ -z "$OOZIE_EXAMPLES_DIR" ]] ; then
+  export OOZIE_EXAMPLES_DIR='/usr/bigtop/current/oozie-client/doc/'
+fi
 cd $OOZIE_EXAMPLES_DIR
 
 tar -zxf oozie-examples.tar.gz
@@ -93,14 +98,14 @@ else
   kinitcmd=""
 fi
 
-su - ${smoke_test_user} -c "hdfs dfs -rm -r examples"
-su - ${smoke_test_user} -c "hdfs dfs -rm -r input-data"
-su - ${smoke_test_user} -c "hdfs dfs -copyFromLocal 
$OOZIE_EXAMPLES_DIR/examples examples"
-su - ${smoke_test_user} -c "hdfs dfs -copyFromLocal 
$OOZIE_EXAMPLES_DIR/examples/input-data input-data"
+su -s /bin/bash - ${smoke_test_user} -c "${hadoop_bin_dir}/hdfs --config 
${hadoop_conf_dir} dfs -rm -r examples"
+su -s /bin/bash - ${smoke_test_user} -c "${hadoop_bin_dir}/hdfs --config 
${hadoop_conf_dir} dfs -rm -r input-data"
+su -s /bin/bash - ${smoke_test_user} -c "${hadoop_bin_dir}/hdfs --config 
${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
+su -s /bin/bash - ${smoke_test_user} -c "${hadoop_bin_dir}/hdfs --config 
${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data 
input-data"
 
-cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; /usr/bin/oozie 
-Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config 
$OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
+cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie 
-Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config 
$OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
 echo $cmd
-job_info=`su - ${smoke_test_user} -c "$cmd" | grep "job:"`
+job_info=`su -s /bin/bash - ${smoke_test_user} -c "$cmd" | grep "job:"`
 job_id="`echo $job_info | cut -d':' -f2`"
 checkOozieJobStatus "$job_id" 15
 OOZIE_EXIT_CODE="$?"

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie.py
index e125a4d..cbb5e69 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie.py
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie.py
@@ -31,6 +31,11 @@ def oozie(is_server=False # TODO: see if we can remove this
                          owner=params.oozie_user,
                          mode=params.oozie_hdfs_user_mode
     )
+  Directory( params.conf_dir,
+             recursive = True,
+             owner = params.oozie_user,
+             group = params.user_group
+  )
   XmlConfig( "oozie-site.xml",
     conf_dir = params.conf_dir,
     configurations = params.config['configurations']['oozie-site'],
@@ -39,16 +44,23 @@ def oozie(is_server=False # TODO: see if see can remove this
     group = params.user_group,
     mode = 0664
   )
-  Directory( params.conf_dir,
-    owner = params.oozie_user,
-    group = params.user_group
-  )
-  
   File(format("{conf_dir}/oozie-env.sh"),
     owner=params.oozie_user,
     content=InlineTemplate(params.oozie_env_sh_template)
   )
 
+  if params.security_enabled:
+    tomcat_conf_dir = format("{tomcat_conf_secure}")
+  else:
+    tomcat_conf_dir = format("{tomcat_conf}")
+
+  File(format("{tomcat_conf_dir}/catalina.properties"),
+    content = Template("catalina.properties.j2"),
+    owner = params.oozie_user,
+    group = params.user_group,
+    mode = 0755
+  )
+
   if (params.log4j_props != None):
     File(format("{params.conf_dir}/oozie-log4j.properties"),
       mode=0644,
@@ -121,32 +133,50 @@ def oozie_server_specific(
     not_if="ls {pid_file} >/dev/null 2>&1 && !(ps `cat {pid_file}` >/dev/null 
2>&1)"
   )
   
-  oozie_server_directorties = [params.oozie_pid_dir, params.oozie_log_dir, 
params.oozie_tmp_dir, params.oozie_data_dir, params.oozie_lib_dir, 
params.oozie_webapps_dir]            
+  oozie_server_directorties = [params.oozie_pid_dir, params.oozie_log_dir, 
params.oozie_tmp_dir, params.oozie_data_dir, params.oozie_lib_dir, 
params.oozie_webapps_dir, params.oozie_webapps_conf_dir, 
params.oozie_server_dir]
   Directory( oozie_server_directorties,
     owner = params.oozie_user,
     mode = 0755,
     recursive = True
   )
 
-  cmd1 = "cd /usr/lib/oozie && tar -xvf oozie-sharelib.tar.gz"
-  cmd2 =  format("cd /usr/lib/oozie && mkdir -p {oozie_tmp_dir}")
-  
-  # this is different for HDP1
-  cmd3 = format("cd /usr/lib/oozie && chown {oozie_user}:{user_group} 
{oozie_tmp_dir} && mkdir -p {oozie_libext_dir} && cp {ext_js_path} 
{oozie_libext_dir}")
+  cmd1 = "sh"
+
   if params.jdbc_driver_name=="com.mysql.jdbc.Driver" or 
params.jdbc_driver_name=="oracle.jdbc.driver.OracleDriver":
-    cmd3 += format(" && cp {jdbc_driver_jar} {oozie_libext_dir}")
-  #falcon el extension
-  if params.has_falcon_host:
-    cmd3 += format(' && cp 
{falcon_home}/oozie/ext/falcon-oozie-el-extension-*.jar {oozie_libext_dir}')
-  # this is different for HDP1
-  cmd4 = format("cd {oozie_tmp_dir} && {oozie_setup_sh} prepare-war")
+    cmd1 += format(" && cp {jdbc_driver_jar} {oozie_lib_dir}")
 
   no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` 
>/dev/null 2>&1")
-  Execute( [cmd1, cmd2, cmd3],
+  Execute( [cmd1],
     not_if  = no_op_test
   )
-  Execute( cmd4,
-    user = params.oozie_user,
-    not_if  = no_op_test
+
+# the version of hadoop-auth jar files in bigtop 0.8 oozie is wrong
+def correct_hadoop_auth_jar_files():
+
+  hadoop_auth_jar_file = "/usr/lib/hadoop/hadoop-auth-2.4.1.jar"
+
+  if not os.path.exists(hadoop_auth_jar_file):
+    raise Fail("Could not find %s" % (hadoop_auth_jar_file))
+
+  commands = ' '.join(
+    (
+      "if [ -f /usr/lib/oozie/lib/hadoop-auth-2.0.2-alpha.jar ];",
+      "then",
+      "rm -rf /usr/lib/oozie/lib/hadoop-auth-2.0.2-alpha.jar;",
+      "cp " + hadoop_auth_jar_file + " /usr/lib/oozie/lib;",
+      "fi"
+    )
   )
-  
+  Execute(commands)
+
+  commands = ' '.join(
+    (
+      "if [ -f /usr/lib/oozie/libtools/hadoop-auth-2.0.2-alpha.jar ];",
+      "then",
+      "rm -rf /usr/lib/oozie/libtools/hadoop-auth-2.0.2-alpha.jar;",
+      "cp " + hadoop_auth_jar_file + " /usr/lib/oozie/libtools;",
+      "fi"
+    )
+  )
+  Execute(commands)
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_client.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_client.py
index f77a8db..372a8de 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_client.py
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_client.py
@@ -22,12 +22,14 @@ import sys
 from resource_management import *
 
 from oozie import oozie
+from oozie import correct_hadoop_auth_jar_files
 from oozie_service import oozie_service
 
          
 class OozieClient(Script):
   def install(self, env):
     self.install_packages(env)
+    correct_hadoop_auth_jar_files()
     self.configure(env)
     
   def configure(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_server.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_server.py
index 70414fc..a6adb2d 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_server.py
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_server.py
@@ -22,13 +22,15 @@ import sys
 from resource_management import *
 
 from oozie import oozie
+from oozie import correct_hadoop_auth_jar_files
 from oozie_service import oozie_service
 
          
 class OozieServer(Script):
   def install(self, env):
     self.install_packages(env)
-    
+    correct_hadoop_auth_jar_files()
+
   def configure(self, env):
     import params
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_service.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_service.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_service.py
index 766f9c1..40b78b7 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_service.py
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/oozie_service.py
@@ -27,7 +27,7 @@ def oozie_service(action = 'start'): # 'start' or 'stop'
   no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` 
>/dev/null 2>&1")
   
   if action == 'start':
-    start_cmd = format("cd {oozie_tmp_dir} && 
/usr/lib/oozie/bin/oozie-start.sh")
+    start_cmd = "service oozie start"
     
     if params.jdbc_driver_name == "com.mysql.jdbc.Driver" or \
        params.jdbc_driver_name == "org.postgresql.Driver" or \
@@ -36,37 +36,36 @@ def oozie_service(action = 'start'): # 'start' or 'stop'
     else:
       db_connection_check_command = None
       
-    cmd1 =  format("cd {oozie_tmp_dir} && /usr/lib/oozie/bin/ooziedb.sh create 
-sqlfile oozie.sql -run")
-    cmd2 =  format("{kinit_if_needed} {put_shared_lib_to_hdfs_cmd} ; hadoop 
dfs -chmod -R 755 {oozie_hdfs_user_dir}/share")
+    cmd1 =  "service oozie init"
+    cmd2 =  format("{kinit_if_needed} {put_shared_lib_to_hdfs_cmd} ; hadoop 
--config {hadoop_conf_dir} dfs -chmod -R 755 {oozie_hdfs_user_dir}/share")
 
     if not os.path.isfile(params.jdbc_driver_jar) and params.jdbc_driver_name 
== "org.postgresql.Driver":
-      print "ERROR: jdbc file " + params.jdbc_driver_jar + " is unavailable. 
Please, follow next steps:\n" \
-        "1) Download postgresql-9.0-801.jdbc4.jar.\n2) Create needed 
directory: mkdir -p /usr/lib/oozie/libserver/\n" \
+      print format("ERROR: jdbc file {jdbc_driver_jar} is unavailable. Please, 
follow next steps:\n" \
+        "1) Download postgresql-9.0-801.jdbc4.jar.\n2) Create needed 
directory: mkdir -p {oozie_home}/libserver/\n" \
         "3) Copy postgresql-9.0-801.jdbc4.jar to newly created dir: cp 
/path/to/jdbc/postgresql-9.0-801.jdbc4.jar " \
-        "/usr/lib/oozie/libserver/\n4) Copy postgresql-9.0-801.jdbc4.jar to 
libext: cp " \
-        "/path/to/jdbc/postgresql-9.0-801.jdbc4.jar /usr/lib/oozie/libext/\n"
+        "{oozie_home}/libserver/\n4) Copy postgresql-9.0-801.jdbc4.jar to 
libext: cp " \
+        "/path/to/jdbc/postgresql-9.0-801.jdbc4.jar {oozie_home}/libext/\n")
       exit(1)
 
     if db_connection_check_command:
       Execute( db_connection_check_command, tries=5, try_sleep=10)
                   
     Execute( cmd1,
-      user = params.oozie_user,
       not_if  = no_op_test,
       ignore_failures = True
     ) 
     
     Execute( cmd2,
-      user = params.oozie_user,       
-      not_if = format("{kinit_if_needed} hadoop dfs -ls /user/oozie/share | 
awk 'BEGIN {{count=0;}} /share/ {{count++}} END {{if (count > 0) {{exit 0}} 
else {{exit 1}}}}'")
+      user = params.oozie_user,
+      not_if = format("{kinit_if_needed} hadoop --config {hadoop_conf_dir} dfs 
-ls /user/oozie/share | awk 'BEGIN {{count=0;}} /share/ {{count++}} END {{if 
(count > 0) {{exit 0}} else {{exit 1}}}}'"),
+      path = params.execute_path
     )
     
     Execute( start_cmd,
-      user = params.oozie_user,
       not_if  = no_op_test,
     )
   elif action == 'stop':
-    stop_cmd  = format("su - {oozie_user} -c  'cd {oozie_tmp_dir} && 
/usr/lib/oozie/bin/oozie-stop.sh' && rm -f {pid_file}")
+    stop_cmd  = "service oozie stop"
     Execute( stop_cmd,
       only_if  = no_op_test
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/params.py
index a484c0e..f13fbaf 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/params.py
@@ -20,31 +20,87 @@ limitations under the License.
 
 from resource_management import *
 import status_params
+import os
+import fnmatch
 
 # server configurations
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
+#RPM versioning support
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
+
+#hadoop params
+if rpm_version:
+  hadoop_bin_dir = "/usr/bigtop/current/hadoop-client/bin"
+  hadoop_lib_home = "/usr/bigtop/current/hadoop-client/lib"
+  hive_lib_dir = "/usr/bigtop/current/hive-client/lib"
+  oozie_lib_dir = "/usr/bigtop/current/oozie-client/"
+  oozie_setup_sh = "/usr/bigtop/current/oozie-client/bin/oozie-setup.sh"
+  oozie_webapps_dir = 
"/usr/bigtop/current/oozie-client/tomcat-deployment/webapps"
+  oozie_webapps_conf_dir = 
"/usr/bigtop/current/oozie-client/tomcat-deployment/conf"
+  oozie_libext_dir = "/usr/bigtop/current/oozie-client/libext"
+  oozie_server_dir = "/usr/bigtop/current/oozie-client/tomcat-deployment"
+  oozie_shared_lib = "/usr/bigtop/current/oozie-client/oozie-sharelib.tar.gz"
+  oozie_home = "/usr/bigtop/current/oozie-client"
+  oozie_bin_dir = "/usr/bigtop/current/oozie-client/bin"
+  falcon_home = '/usr/bigtop/current/falcon-client'
+  tomcat_conf = "/etc/oozie/tomcat-conf.http/conf"
+  tomcat_conf_secure = "/etc/oozie/tomcat-conf.https/conf"
+
+else:
+  hadoop_bin_dir = "/usr/bin"
+  hadoop_lib_home = "/usr/lib/hadoop/lib"
+  hive_lib_dir = "/usr/lib/hive/lib"
+  oozie_lib_dir = "/var/lib/oozie/"
+  oozie_setup_sh = "/usr/lib/oozie/bin/oozie-setup.sh"
+  oozie_webapps_dir = "/var/lib/oozie/tomcat-deployment/webapps/"
+  oozie_webapps_conf_dir = "/var/lib/oozie/tomcat-deployment/conf"
+  oozie_libext_dir = "/usr/lib/oozie/libext"
+  oozie_server_dir = "/var/lib/oozie/tomcat-deployment"
+  oozie_shared_lib = "/usr/lib/oozie/oozie-sharelib.tar.gz"
+  oozie_home = "/usr/lib/oozie"
+  oozie_bin_dir = "/usr/bin"
+  falcon_home = '/usr/lib/falcon'
+  tomcat_conf = "/etc/oozie/tomcat-conf.http/conf"
+  tomcat_conf_secure = "/etc/oozie/tomcat-conf.https/conf"
+
+execute_path = oozie_bin_dir + os.pathsep + hadoop_bin_dir
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+conf_dir = "/etc/oozie/conf"
 oozie_user = config['configurations']['oozie-env']['oozie_user']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
-conf_dir = "/etc/oozie/conf"
-hadoop_conf_dir = "/etc/hadoop/conf"
 user_group = config['configurations']['cluster-env']['user_group']
 jdk_location = config['hostLevelParams']['jdk_location']
 check_db_connection_jar_name = "DBConnectionVerification.jar"
 check_db_connection_jar = 
format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
-hadoop_prefix = "/usr"
 oozie_tmp_dir = "/var/tmp/oozie"
 oozie_hdfs_user_dir = format("/user/{oozie_user}")
 oozie_pid_dir = status_params.oozie_pid_dir
 pid_file = status_params.pid_file
 hadoop_jar_location = "/usr/lib/hadoop/"
-hdp_stack_version = config['hostLevelParams']['stack_version']
-# for HDP1 it's "/usr/share/HDP-oozie/ext.zip"
-ext_js_path = "/usr/share/HDP-oozie/ext-2.2.zip"
-oozie_libext_dir = "/usr/lib/oozie/libext"
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 
+hive_jar_files = ""
+
+if not os.path.exists(hive_lib_dir):
+    raise Fail("Could not find Hive library directory: %s" % (hive_lib_dir))
+
+for entry in os.listdir(hive_lib_dir):
+    absolute_path = os.path.join(hive_lib_dir, entry)
+    if os.path.isfile(absolute_path) and not os.path.islink(absolute_path):
+        if fnmatch.fnmatchcase(entry, "hive-*.jar"):
+            if (len(hive_jar_files) == 0):
+                hive_jar_files = absolute_path
+            else:
+                hive_jar_files = hive_jar_files + "," + absolute_path
+
+catalina_properties_common_loader = 
"/usr/lib/hive-hcatalog/share/hcatalog/*.jar,/usr/lib/hive-hcatalog/share/webhcat/java-client/*.jar"
+
+if (len(hive_jar_files) != 0):
+    catalina_properties_common_loader = hive_jar_files + "," + 
catalina_properties_common_loader
+
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", 
"/usr/sbin"])
 oozie_service_keytab = 
config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.keytab.file']
 oozie_principal = 
config['configurations']['oozie-site']['oozie.service.HadoopAccessorService.kerberos.principal']
@@ -53,7 +109,6 @@ oozie_keytab = 
config['configurations']['oozie-env']['oozie_keytab']
 oozie_env_sh_template = config['configurations']['oozie-env']['content']
 
 oracle_driver_jar_name = "ojdbc6.jar"
-java_share_dir = "/usr/share/java"
 
 java_home = config['hostLevelParams']['java_home']
 oozie_metastore_user_name = 
config['configurations']['oozie-site']['oozie.service.JPAService.jdbc.username']
@@ -64,24 +119,16 @@ oozie_data_dir = 
config['configurations']['oozie-env']['oozie_data_dir']
 oozie_server_port = 
get_port_from_url(config['configurations']['oozie-site']['oozie.base.url'])
 oozie_server_admin_port = 
config['configurations']['oozie-env']['oozie_admin_port']
 oozie_env_sh_template = config['configurations']['oozie-env']['content']
-oozie_lib_dir = "/var/lib/oozie/"
-oozie_webapps_dir = "/var/lib/oozie/oozie-server/webapps/"
-oozie_setup_sh = "/usr/lib/oozie/bin/oozie-setup.sh"
-oozie_shared_lib = "/usr/lib/oozie/share"
 fs_root = config['configurations']['core-site']['fs.defaultFS']
 
-if str(hdp_stack_version).startswith('2.0') or 
str(hdp_stack_version).startswith('2.1'):
-  put_shared_lib_to_hdfs_cmd = format("hadoop dfs -put {oozie_shared_lib} 
{oozie_hdfs_user_dir}")
-# for newer
-else:
-  put_shared_lib_to_hdfs_cmd = format("{oozie_setup_sh} sharelib create -fs 
{fs_root} -locallib {oozie_shared_lib}")
+put_shared_lib_to_hdfs_cmd = format("{oozie_setup_sh} sharelib create -fs 
{fs_root} -locallib {oozie_shared_lib}")
   
 jdbc_driver_name = 
default("/configurations/oozie-site/oozie.service.JPAService.jdbc.driver", "")
 
 if jdbc_driver_name == "com.mysql.jdbc.Driver":
   jdbc_driver_jar = "/usr/share/java/mysql-connector-java.jar"
 elif jdbc_driver_name == "org.postgresql.Driver":
-  jdbc_driver_jar = "/usr/lib/oozie/libserver/postgresql-9.0-801.jdbc4.jar"
+  jdbc_driver_jar = 
format("{oozie_home}/libserver/postgresql-9.0-801.jdbc4.jar")
 elif jdbc_driver_name == "oracle.jdbc.driver.OracleDriver":
   jdbc_driver_jar = "/usr/share/java/ojdbc6.jar"
 else:
@@ -91,7 +138,6 @@ hostname = config["hostname"]
 ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
 falcon_host = default("/clusterHostInfo/falcon_server_hosts", [])
 has_falcon_host = not len(falcon_host)  == 0
-falcon_home = '/usr/lib/falcon'
 
 #oozie-log4j.properties
 if (('oozie-log4j' in config['configurations']) and ('content' in 
config['configurations']['oozie-log4j'])):
@@ -103,7 +149,6 @@ oozie_hdfs_user_dir = format("/user/{oozie_user}")
 oozie_hdfs_user_mode = 0775
 #for create_hdfs_directory
 hostname = config["hostname"]
-hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = 
config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -117,5 +162,6 @@ HdfsDirectory = functools.partial(
   hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local
+  kinit_path_local = kinit_path_local,
+  bin_dir = hadoop_bin_dir
 )

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/service_check.py
index 7924145..40f8b8d 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/scripts/service_check.py
@@ -43,12 +43,13 @@ def oozie_smoke_shell_file(
   os_family = System.get_instance().os_family
   
   if params.security_enabled:
-    sh_cmd = format("{tmp_dir}/{file_name} {os_family} {conf_dir} 
{hadoop_conf_dir} {smokeuser} {security_enabled} {smokeuser_keytab} 
{kinit_path_local}")
+    sh_cmd = format("{tmp_dir}/{file_name} {os_family} {conf_dir} 
{oozie_bin_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} 
{security_enabled} {smokeuser_keytab} {kinit_path_local}")
   else:
-    sh_cmd = format("{tmp_dir}/{file_name} {os_family} {conf_dir} 
{hadoop_conf_dir} {smokeuser} {security_enabled}")
+    sh_cmd = format("{tmp_dir}/{file_name} {os_family} {conf_dir} 
{oozie_bin_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} 
{security_enabled}")
 
   Execute( format("{tmp_dir}/{file_name}"),
     command   = sh_cmd,
+    path      = params.execute_path,
     tries     = 3,
     try_sleep = 5,
     logoutput = True

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/templates/catalina.properties.j2
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/templates/catalina.properties.j2
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/templates/catalina.properties.j2
new file mode 100644
index 0000000..96fa996
--- /dev/null
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/templates/catalina.properties.j2
@@ -0,0 +1,81 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# List of comma-separated packages that start with or equal this string
+# will cause a security exception to be thrown when
+# passed to checkPackageAccess unless the
+# corresponding RuntimePermission ("accessClassInPackage."+package) has
+# been granted.
+package.access=sun.,org.apache.catalina.,org.apache.coyote.,org.apache.tomcat.,org.apache.jasper.,sun.beans.
+#
+# List of comma-separated packages that start with or equal this string
+# will cause a security exception to be thrown when
+# passed to checkPackageDefinition unless the
+# corresponding RuntimePermission ("defineClassInPackage."+package) has
+# been granted.
+#
+# by default, no packages are restricted for definition, and none of
+# the class loaders supplied with the JDK call checkPackageDefinition.
+#
+package.definition=sun.,java.,org.apache.catalina.,org.apache.coyote.,org.apache.tomcat.,org.apache.jasper.
+
+#
+#
+# List of comma-separated paths defining the contents of the "common"
+# classloader. Prefixes should be used to define what is the repository type.
+# Path may be relative to the CATALINA_HOME or CATALINA_BASE path or absolute.
+# If left as blank,the JVM system loader will be used as Catalina's "common"
+# loader.
+# Examples:
+#     "foo": Add this folder as a class repository
+#     "foo/*.jar": Add all the JARs of the specified folder as class
+#                  repositories
+#     "foo/bar.jar": Add bar.jar as a class repository
+common.loader=/var/lib/oozie/*.jar,/usr/lib/hadoop/client/*.jar,{{catalina_properties_common_loader}},/usr/lib/oozie/libserver/*.jar,${catalina.home}/lib,${catalina.home}/lib/*.jar
+
+#
+# List of comma-separated paths defining the contents of the "server"
+# classloader. Prefixes should be used to define what is the repository type.
+# Path may be relative to the CATALINA_HOME or CATALINA_BASE path or absolute.
+# If left as blank, the "common" loader will be used as Catalina's "server"
+# loader.
+# Examples:
+#     "foo": Add this folder as a class repository
+#     "foo/*.jar": Add all the JARs of the specified folder as class
+#                  repositories
+#     "foo/bar.jar": Add bar.jar as a class repository
+server.loader=
+
+#
+# List of comma-separated paths defining the contents of the "shared"
+# classloader. Prefixes should be used to define what is the repository type.
+# Path may be relative to the CATALINA_BASE path or absolute. If left as blank,
+# the "common" loader will be used as Catalina's "shared" loader.
+# Examples:
+#     "foo": Add this folder as a class repository
+#     "foo/*.jar": Add all the JARs of the specified folder as class
+#                  repositories
+#     "foo/bar.jar": Add bar.jar as a class repository
+# Please note that for single jars, e.g. bar.jar, you need the URL form
+# starting with file:.
+shared.loader=
+
+#
+# String cache configuration.
+tomcat.util.buf.StringCache.byte.enabled=true
+#tomcat.util.buf.StringCache.char.enabled=true
+#tomcat.util.buf.StringCache.trainThreshold=500000
+#tomcat.util.buf.StringCache.cacheSize=5000
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/templates/oozie-log4j.properties.j2
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/templates/oozie-log4j.properties.j2
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/templates/oozie-log4j.properties.j2
index b571a76..8c9f25e 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/templates/oozie-log4j.properties.j2
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/OOZIE/package/templates/oozie-log4j.properties.j2
@@ -51,7 +51,7 @@ log4j.appender.oozie.DatePattern='.'yyyy-MM-dd-HH
 log4j.appender.oozie.File=${oozie.log.dir}/oozie.log
 log4j.appender.oozie.Append=true
 log4j.appender.oozie.layout=org.apache.log4j.PatternLayout
-log4j.appender.oozie.layout.ConversionPattern=%d{ISO8601} %5p %c{1}:%L - %m%n
+log4j.appender.oozie.layout.ConversionPattern=%d{ISO8601} %5p %c{1}:%L - 
SERVER[${oozie.instance.id}] %m%n
 
 log4j.appender.oozieops=org.apache.log4j.DailyRollingFileAppender
 log4j.appender.oozieops.DatePattern='.'yyyy-MM-dd

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/params.py
index 1b522b8..b334f3b 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/params.py
@@ -25,8 +25,21 @@ from resource_management import *
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
 
-pig_conf_dir = "/etc/pig/conf"
+#RPM versioning support
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
+
+#hadoop params
+if rpm_version:
+  hadoop_bin_dir = "/usr/bigtop/current/hadoop-client/bin"
+  hadoop_home = '/usr/bigtop/current/hadoop-client'
+  pig_bin_dir = '/usr/bigtop/current/pig-client/bin'
+else:
+  hadoop_bin_dir = "/usr/bin"
+  hadoop_home = '/usr'
+  pig_bin_dir = ""
+
 hadoop_conf_dir = "/etc/hadoop/conf"
+pig_conf_dir = "/etc/pig/conf"
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = 
config['configurations']['hadoop-env']['hdfs_principal_name']
 smokeuser = config['configurations']['cluster-env']['smokeuser']
@@ -38,7 +51,6 @@ pig_env_sh_template = 
config['configurations']['pig-env']['content']
 
 # not supporting 32 bit jdk.
 java64_home = config['hostLevelParams']['java_home']
-hadoop_home = "/usr"
 
 pig_properties = config['configurations']['pig-properties']['content']
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/pig.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/pig.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/pig.py
index 8326262..afdba8d 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/pig.py
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/pig.py
@@ -26,6 +26,7 @@ def pig():
   import params
 
   Directory( params.pig_conf_dir,
+    recursive = True,
     owner = params.hdfs_user,
     group = params.user_group
   )

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/service_check.py
index 8431b6d..7619bd6 100644
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/PIG/package/scripts/service_check.py
@@ -31,7 +31,7 @@ class PigServiceCheck(Script):
 
     cleanup_cmd = format("dfs -rmr {output_file} {input_file}")
     #cleanup put below to handle retries; if retrying there wil be a stale 
file that needs cleanup; exit code is fn of second command
-    create_file_cmd = format("{cleanup_cmd}; hadoop dfs -put /etc/passwd 
{input_file} ") #TODO: inconsistent that second command needs hadoop
+    create_file_cmd = format("{cleanup_cmd}; hadoop --config {hadoop_conf_dir} 
dfs -put /etc/passwd {input_file} ") #TODO: inconsistent that second command 
needs hadoop
     test_cmd = format("fs -test -e {output_file}")
 
     ExecuteHadoop( create_file_cmd,
@@ -42,7 +42,8 @@ class PigServiceCheck(Script):
       # for kinit run
       keytab = params.smoke_user_keytab,
       security_enabled = params.security_enabled,
-      kinit_path_local = params.kinit_path_local
+      kinit_path_local = params.kinit_path_local,
+      bin_dir = params.hadoop_bin_dir
     )
 
     File( format("{tmp_dir}/pigSmoke.sh"),
@@ -53,13 +54,14 @@ class PigServiceCheck(Script):
     Execute( format("pig {tmp_dir}/pigSmoke.sh"),
       tries     = 3,
       try_sleep = 5,
-      path      = '/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+      path      = 
format('{pig_bin_dir}:/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'),
       user      = params.smokeuser
     )
 
     ExecuteHadoop( test_cmd,
       user      = params.smokeuser,
-      conf_dir = params.hadoop_conf_dir
+      conf_dir = params.hadoop_conf_dir,
+      bin_dir = params.hadoop_bin_dir
     )
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/configuration/sqoop-env.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/configuration/sqoop-env.xml
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/configuration/sqoop-env.xml
deleted file mode 100644
index b598215..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/configuration/sqoop-env.xml
+++ /dev/null
@@ -1,54 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <!-- sqoop-env.sh -->
-  <property>
-    <name>content</name>
-    <description>This is the jinja template for sqoop-env.sh file</description>
-    <value>
-# Set Hadoop-specific environment variables here.
-
-#Set path to where bin/hadoop is available
-#Set path to where bin/hadoop is available
-export HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}
-
-#set the path to where bin/hbase is available
-export HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}
-
-#Set the path to where bin/hive is available
-export HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}
-
-#Set the path for where zookeper config dir is
-export ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}
-
-# add libthrift in hive to sqoop class path first so hive imports work
-export SQOOP_USER_CLASSPATH="`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> 
/dev/null`:${SQOOP_USER_CLASSPATH}"
-    </value>
-  </property>
-  <property>
-    <name>sqoop_user</name>
-    <description>User to run Sqoop as</description>
-    <property-type>USER</property-type>
-    <value>sqoop</value>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/metainfo.xml
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/metainfo.xml
deleted file mode 100644
index 1f4a90b..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/metainfo.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>SQOOP</name>
-      <displayName>Sqoop</displayName>
-      <comment>Tool for transferring bulk data between Apache Hadoop and
-        structured data stores such as relational databases
-      </comment>
-      <version>1.4.4.2.0</version>
-
-      <components>
-        <component>
-          <name>SQOOP</name>
-          <displayName>Sqoop</displayName>
-          <category>CLIENT</category>
-          <cardinality>1+</cardinality>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/sqoop_client.py</script>
-            <scriptType>PYTHON</scriptType>
-          </commandScript>
-          <configFiles>
-            <configFile>
-              <type>env</type>
-              <fileName>sqoop-env.sh</fileName>
-              <dictionaryName>sqoop-env</dictionaryName>
-            </configFile>
-          </configFiles>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>sqoop</name>
-            </package>
-            <package>
-              <name>mysql-connector-java</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>HDFS</service>
-      </requiredServices>
-      
-      <configuration-dependencies>
-        <config-type>sqoop-env</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/__init__.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/__init__.py
deleted file mode 100644
index 5561e10..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/params.py
deleted file mode 100644
index 144a587..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/params.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-config = Script.get_config()
-
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-user_group = config['configurations']['cluster-env']['user_group']
-sqoop_env_sh_template = config['configurations']['sqoop-env']['content']
-
-sqoop_conf_dir = "/usr/lib/sqoop/conf"
-hbase_home = "/usr"
-hive_home = "/usr"
-zoo_conf_dir = "/etc/zookeeper"
-sqoop_lib = "/usr/lib/sqoop/lib"
-sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
-
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", 
"/usr/sbin"])

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/service_check.py
deleted file mode 100644
index c42501a..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/service_check.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-
-from resource_management import *
-
-
-class SqoopServiceCheck(Script):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-    if params.security_enabled:
-        Execute(format("{kinit_path_local}  -kt {smoke_user_keytab} 
{smokeuser}"))
-    Execute("sqoop version",
-            user = params.smokeuser,
-            logoutput = True
-    )
-
-if __name__ == "__main__":
-  SqoopServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/sqoop.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/sqoop.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/sqoop.py
deleted file mode 100644
index f0bd354..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/sqoop.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import sys
-
-def sqoop(type=None):
-  import params
-  Link(params.sqoop_lib + "/mysql-connector-java.jar",
-       to = '/usr/share/java/mysql-connector-java.jar'
-  ) 
-  Directory(params.sqoop_conf_dir,
-            owner = params.sqoop_user,
-            group = params.user_group
-  )
-  
-  File(format("{sqoop_conf_dir}/sqoop-env.sh"),
-    owner=params.sqoop_user,
-    content=InlineTemplate(params.sqoop_env_sh_template)
-  )
-  
-  File (params.sqoop_conf_dir + "/sqoop-env-template.sh",
-          owner = params.sqoop_user,
-          group = params.user_group
-  )
-  File (params.sqoop_conf_dir + "/sqoop-site-template.xml",
-         owner = params.sqoop_user,
-         group = params.user_group
-  )
-  File (params.sqoop_conf_dir + "/sqoop-site.xml",
-         owner = params.sqoop_user,
-         group = params.user_group
-  )
-  pass
-
-def sqoop_TemplateConfig(name, tag=None):
-  import params
-  TemplateConfig( format("{sqoop_conf_dir}/{name}"),
-                  owner = params.sqoop_user,
-                  template_tag = tag
-  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/sqoop_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/sqoop_client.py
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/sqoop_client.py
deleted file mode 100644
index 6829557..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/SQOOP/package/scripts/sqoop_client.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-
-from sqoop import sqoop
-
-
-class SqoopClient(Script):
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    sqoop(type='client')
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-if __name__ == "__main__":
-  SqoopClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/configuration/webhcat-env.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/configuration/webhcat-env.xml
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/configuration/webhcat-env.xml
deleted file mode 100644
index 304bbb7..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/configuration/webhcat-env.xml
+++ /dev/null
@@ -1,54 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-
-<configuration>
-  <!-- webhcat-env.sh -->
-  <property>
-    <name>content</name>
-    <description>webhcat-env.sh content</description>
-    <value>
-# The file containing the running pid
-PID_FILE={{pid_file}}
-
-TEMPLETON_LOG_DIR={{templeton_log_dir}}/
-
-
-WEBHCAT_LOG_DIR={{templeton_log_dir}}/
-
-# The console error log
-ERROR_LOG={{templeton_log_dir}}/webhcat-console-error.log
-
-# The console log
-CONSOLE_LOG={{templeton_log_dir}}/webhcat-console.log
-
-#TEMPLETON_JAR=templeton_jar_name
-
-#HADOOP_PREFIX=hadoop_prefix
-
-#HCAT_PREFIX=hive_prefix
-
-# Set HADOOP_HOME to point to a specific hadoop install directory
-export HADOOP_HOME=/usr/lib/hadoop
-    </value>
-  </property>
-  
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/configuration/webhcat-site.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/configuration/webhcat-site.xml
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/configuration/webhcat-site.xml
deleted file mode 100644
index 0523dab..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/configuration/webhcat-site.xml
+++ /dev/null
@@ -1,138 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-<!-- The default settings for Templeton. -->
-<!-- Edit templeton-site.xml to change settings for your local -->
-<!-- install. -->
-
-<configuration supports_final="true">
-
-  <property>
-    <name>templeton.port</name>
-      <value>50111</value>
-    <description>The HTTP port for the main server.</description>
-  </property>
-
-  <property>
-    <name>templeton.hadoop.conf.dir</name>
-    <value>/etc/hadoop/conf</value>
-    <description>The path to the Hadoop configuration.</description>
-  </property>
-
-  <property>
-    <name>templeton.jar</name>
-    <value>/usr/lib/hcatalog/share/webhcat/svr/webhcat.jar</value>
-    <description>The path to the Templeton jar file.</description>
-  </property>
-
-  <property>
-    <name>templeton.libjars</name>
-    <value>/usr/lib/zookeeper/zookeeper.jar</value>
-    <description>Jars to add to the classpath.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.hadoop</name>
-    <value>/usr/bin/hadoop</value>
-    <description>The path to the Hadoop executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.archive</name>
-    <value>hdfs:///apps/webhcat/pig.tar.gz</value>
-    <description>The path to the Pig archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.pig.path</name>
-    <value>pig.tar.gz/pig/bin/pig</value>
-    <description>The path to the Pig executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat</name>
-    <value>/usr/bin/hcat</value>
-    <description>The path to the hcatalog executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.archive</name>
-    <value>hdfs:///apps/webhcat/hive.tar.gz</value>
-    <description>The path to the Hive archive.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.home</name>
-    <value>hive.tar.gz/hive</value>
-    <description>The path to the Hive home within the tar. Has no effect if 
templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hcat.home</name>
-    <value>hive.tar.gz/hive/hcatalog</value>
-    <description>The path to the HCat home within the tar. Has no effect if 
templeton.hive.archive is not set.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.path</name>
-    <value>hive.tar.gz/hive/bin/hive</value>
-    <description>The path to the Hive executable.</description>
-  </property>
-
-  <property>
-    <name>templeton.hive.properties</name>
-    <value>hive.metastore.local=false, 
hive.metastore.uris=thrift://localhost:9933, 
hive.metastore.sasl.enabled=false</value>
-    <description>Properties to set when running hive.</description>
-  </property>
-
-
-  <property>
-    <name>templeton.zookeeper.hosts</name>
-    <value>localhost:2181</value>
-    <description>ZooKeeper servers, as comma separated host:port 
pairs</description>
-  </property>
-
-  <property>
-    <name>templeton.storage.class</name>
-    <value>org.apache.hive.hcatalog.templeton.tool.ZooKeeperStorage</value>
-    <description>The class to use as storage</description>
-  </property>
-
-  <property>
-   <name>templeton.override.enabled</name>
-   <value>false</value>
-   <description>
-     Enable the override path in templeton.override.jars
-   </description>
- </property>
-
- <property>
-    <name>templeton.streaming.jar</name>
-    <value>hdfs:///apps/webhcat/hadoop-streaming.jar</value>
-    <description>The hdfs path to the Hadoop streaming jar file.</description>
-  </property> 
-
-  <property>
-    <name>templeton.exec.timeout</name>
-    <value>60000</value>
-    <description>Time out for templeton api</description>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/32b1fc38/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/metainfo.xml
 
b/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/metainfo.xml
deleted file mode 100644
index 98f419d..0000000
--- 
a/ambari-server/src/main/resources/stacks/BIGTOP/0.8/services/WEBHCAT/metainfo.xml
+++ /dev/null
@@ -1,107 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>WEBHCAT</name>
-      <displayName>WebHCat</displayName>
-      <comment>Provides a REST-like web API for HCatalog and related Hadoop 
components.</comment>
-      <version>0.13.0.689</version>
-      <components>
-        <component>
-          <name>WEBHCAT_SERVER</name>
-          <displayName>WebHCat Server</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <dependencies>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>MAPREDUCE2/MAPREDUCE2_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_SERVER</name>
-              <scope>cluster</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-                <co-locate>WEBHCAT/WEBHCAT_SERVER</co-locate>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>ZOOKEEPER/ZOOKEEPER_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>YARN/YARN_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <commandScript>
-            <script>scripts/webhcat_server.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-      </components>
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>hive-hcatalog</name>
-            </package>
-            <package>
-              <name>hive-webhcat</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-      
-      <requiredServices>
-        <service>HIVE</service>
-        <service>ZOOKEEPER</service>
-      </requiredServices>
-      
-      <configuration-dependencies>
-        <config-type>webhcat-site</config-type>
-        <config-type>webhcat-env</config-type>
-      </configuration-dependencies>
-    </service>
-  </services>
-</metainfo>

Reply via email to