http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/params.py
deleted file mode 100644
index 2340df9..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/params.py
+++ /dev/null
@@ -1,228 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import functools
-import os
-import re
-from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import get_kinit_path
-from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions.default import default
-from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions.get_stack_version import 
get_stack_version
-from resource_management.libraries.functions.stack_features import 
check_stack_feature
-from resource_management.libraries.functions.version import 
format_stack_version
-from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.script.script import Script
-
-def get_port_from_url(address):
-  if not (address is None):
-    return address.split(':')[-1]
-  else:
-    return address
-
-def extract_spark_version(spark_home):
-  try:
-    with open(spark_home + "/RELEASE") as fline:
-      return re.search('Spark (\d\.\d).+', fline.readline().rstrip()).group(1)
-  except:
-    pass
-  return None
-
-
-# server configurations
-config = Script.get_config()
-stack_root = Script.get_stack_root()
-
-# e.g. 
/var/lib/ambari-agent/cache/stacks/HDP/2.2/services/zeppelin-stack/package
-service_packagedir = os.path.realpath(__file__).split('/scripts')[0]
-
-zeppelin_dirname = 'zeppelin-server'
-
-install_dir = os.path.join(stack_root, "current")
-executor_mem = 
config['configurations']['zeppelin-env']['zeppelin.executor.mem']
-executor_instances = config['configurations']['zeppelin-env'][
-  'zeppelin.executor.instances']
-
-spark_jar_dir = 
config['configurations']['zeppelin-env']['zeppelin.spark.jar.dir']
-spark_jar = format("{spark_jar_dir}/zeppelin-spark-0.5.5-SNAPSHOT.jar")
-setup_view = True
-temp_file = config['configurations']['zeppelin-env']['zeppelin.temp.file']
-
-spark_home = ""
-spark_version = None
-spark2_home = ""
-spark2_version = None
-if 'spark-defaults' in config['configurations']:
-  spark_home = os.path.join(stack_root, "current", 'spark-client')
-  spark_version = extract_spark_version(spark_home)
-if 'spark2-defaults' in config['configurations']:
-  spark2_home = os.path.join(stack_root, "current", 'spark2-client')
-  spark2_version = extract_spark_version(spark2_home)
-
-# New Cluster Stack Version that is defined during the RESTART of a Rolling 
Upgrade
-version = default("/commandParams/version", None)
-stack_name = default("/hostLevelParams/stack_name", None)
-
-# params from zeppelin-config
-zeppelin_port = 
str(config['configurations']['zeppelin-config']['zeppelin.server.port'])
-zeppelin_interpreter = None
-if 'zeppelin.interpreter.group.order' in 
config['configurations']['zeppelin-config']:
-  zeppelin_interpreter = str(config['configurations']['zeppelin-config']
-                             ['zeppelin.interpreter.group.order']).split(",")
-
-# params from zeppelin-env
-zeppelin_user = config['configurations']['zeppelin-env']['zeppelin_user']
-zeppelin_group = config['configurations']['zeppelin-env']['zeppelin_group']
-zeppelin_log_dir = config['configurations']['zeppelin-env']['zeppelin_log_dir']
-zeppelin_pid_dir = config['configurations']['zeppelin-env']['zeppelin_pid_dir']
-zeppelin_log_file = os.path.join(zeppelin_log_dir, 'zeppelin-setup.log')
-zeppelin_hdfs_user_dir = format("/user/{zeppelin_user}")
-
-zeppelin_dir = os.path.join(*[install_dir, zeppelin_dirname])
-conf_dir = "/etc/zeppelin/conf"
-external_dependency_conf = "/etc/zeppelin/conf/external-dependency-conf"
-notebook_dir = os.path.join(*[install_dir, zeppelin_dirname, 'notebook'])
-
-# zeppelin-env.sh
-zeppelin_env_content = 
config['configurations']['zeppelin-env']['zeppelin_env_content']
-
-# shiro.ini
-shiro_ini_content = 
config['configurations']['zeppelin-shiro-ini']['shiro_ini_content']
-
-# log4j.properties
-log4j_properties_content = 
config['configurations']['zeppelin-log4j-properties']['log4j_properties_content']
-
-# detect configs
-master_configs = config['clusterHostInfo']
-java64_home = config['hostLevelParams']['java_home']
-ambari_host = str(master_configs['ambari_server_host'][0])
-zeppelin_host = str(master_configs['zeppelin_master_hosts'][0])
-ui_ssl_enabled = config['configurations']['zeppelin-config']['zeppelin.ssl']
-
-# detect HS2 details, if installed
-
-hive_server_host = None
-hive_metastore_host = '0.0.0.0'
-hive_metastore_port = None
-hive_server_port = None
-hive_zookeeper_quorum = None
-hive_server2_support_dynamic_service_discovery = None
-is_hive_installed = False
-hive_zookeeper_namespace = None
-hive_interactive_zookeeper_namespace = None
-
-if 'hive_server_host' in master_configs and 
len(master_configs['hive_server_host']) != 0:
-  is_hive_installed = True
-  spark_hive_properties = {
-    'hive.metastore.uris': 
default('/configurations/hive-site/hive.metastore.uris', '')
-  }
-  hive_server_host = str(master_configs['hive_server_host'][0])
-  hive_metastore_host = str(master_configs['hive_metastore_host'][0])
-  hive_metastore_port = str(
-    get_port_from_url(default('/configurations/hive-site/hive.metastore.uris', 
'')))
-  hive_server_port = 
str(config['configurations']['hive-site']['hive.server2.thrift.http.port'])
-  hive_zookeeper_quorum = 
config['configurations']['hive-site']['hive.zookeeper.quorum']
-  hive_zookeeper_namespace = 
config['configurations']['hive-site']['hive.server2.zookeeper.namespace']
-  hive_server2_support_dynamic_service_discovery = 
config['configurations']['hive-site']['hive.server2.support.dynamic.service.discovery']
-
-hive_server_interactive_hosts = None
-if 'hive_server_interactive_hosts' in master_configs and 
len(master_configs['hive_server_interactive_hosts']) != 0:
-    hive_server_interactive_hosts = 
str(master_configs['hive_server_interactive_hosts'][0])
-    hive_interactive_zookeeper_namespace = 
config['configurations']['hive-interactive-site']['hive.server2.zookeeper.namespace']
-    hive_server_port = 
str(config['configurations']['hive-site']['hive.server2.thrift.http.port'])
-    hive_zookeeper_quorum = 
config['configurations']['hive-site']['hive.zookeeper.quorum']
-    hive_server2_support_dynamic_service_discovery = 
config['configurations']['hive-site']['hive.server2.support.dynamic.service.discovery']
-
-# detect hbase details if installed
-zookeeper_znode_parent = None
-hbase_zookeeper_quorum = None
-is_hbase_installed = False
-if 'hbase_master_hosts' in master_configs and 'hbase-site' in 
config['configurations']:
-  is_hbase_installed = True
-  zookeeper_znode_parent = 
config['configurations']['hbase-site']['zookeeper.znode.parent']
-  hbase_zookeeper_quorum = 
config['configurations']['hbase-site']['hbase.zookeeper.quorum']
-
-# detect spark queue
-if 'spark-defaults' in config['configurations'] and 'spark.yarn.queue' in 
config['configurations']['spark-defaults']:
-  spark_queue = config['configurations']['spark-defaults']['spark.yarn.queue']
-elif 'spark2-defaults' in config['configurations'] and 'spark.yarn.queue' in 
config['configurations']['spark2-defaults']:
-  spark_queue = config['configurations']['spark2-defaults']['spark.yarn.queue']
-else:
-  spark_queue = 'default'
-
-zeppelin_kerberos_keytab = 
config['configurations']['zeppelin-env']['zeppelin.server.kerberos.keytab']
-zeppelin_kerberos_principal = 
config['configurations']['zeppelin-env']['zeppelin.server.kerberos.principal']
-
-# e.g. 2.3
-stack_version_unformatted = config['hostLevelParams']['stack_version']
-
-# e.g. 2.3.0.0
-stack_version_formatted = format_stack_version(stack_version_unformatted)
-
-# e.g. 2.3.0.0-2130
-full_stack_version = default("/commandParams/version", None)
-
-spark_client_version = get_stack_version('spark-client')
-
-hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
-livy_hosts = default("/clusterHostInfo/livy_server_hosts", [])
-livy2_hosts = default("/clusterHostInfo/livy2_server_hosts", [])
-
-livy_livyserver_host = None
-livy_livyserver_port = None
-livy2_livyserver_host = None
-livy2_livyserver_port = None
-if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, 
stack_version_formatted) and \
-    len(livy_hosts) > 0:
-  livy_livyserver_host = str(livy_hosts[0])
-  livy_livyserver_port = 
config['configurations']['livy-conf']['livy.server.port']
-
-if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY2, 
stack_version_formatted) and \
-    len(livy2_hosts) > 0:
-  livy2_livyserver_host = str(livy2_hosts[0])
-  livy2_livyserver_port = 
config['configurations']['livy2-conf']['livy.server.port']
-
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-kinit_path_local = 
get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', 
None))
-hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hdfs_principal_name = 
config['configurations']['hadoop-env']['hdfs_principal_name']
-hdfs_site = config['configurations']['hdfs-site']
-default_fs = config['configurations']['core-site']['fs.defaultFS']
-
-# create partial functions with common arguments for every HdfsResource call
-# to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
-  hdfs_resource_ignore_file="/var/lib/ambari-agent/data/.hdfs_resource_ignore",
-  security_enabled=security_enabled,
-  keytab=hdfs_user_keytab,
-  kinit_path_local=kinit_path_local,
-  hadoop_bin_dir=hadoop_bin_dir,
-  hadoop_conf_dir=hadoop_conf_dir,
-  principal_name=hdfs_principal_name,
-  hdfs_site=hdfs_site,
-  default_fs=default_fs
-)

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/service_check.py
deleted file mode 100644
index bd7c855..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/service_check.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agree in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions.format import format
-from resource_management.core.resources.system import Execute
-
-class ZeppelinServiceCheck(Script):
-    def service_check(self, env):
-        import params
-        env.set_params(params)
-
-        if params.security_enabled:
-          zeppelin_kinit_cmd = format("{kinit_path_local} -kt 
{zeppelin_kerberos_keytab} {zeppelin_kerberos_principal}; ")
-          Execute(zeppelin_kinit_cmd, user=params.zeppelin_user)
-
-        scheme = "https" if params.ui_ssl_enabled else "http"
-        Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate 
-u: -k {scheme}://{zeppelin_host}:{zeppelin_port} | grep 200"),
-                tries = 10,
-                try_sleep=3,
-                logoutput=True)
-
-if __name__ == "__main__":
-    ZeppelinServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/spark2_config_template.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/spark2_config_template.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/spark2_config_template.py
deleted file mode 100644
index 28a63c6..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/spark2_config_template.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-template = '''
-{
-  "id": "2C4U48MY3_spark2",
-  "name": "spark2",
-  "group": "spark",
-  "properties": {
-    "spark.executor.memory": "",
-    "args": "",
-    "zeppelin.spark.printREPLOutput": "true",
-    "spark.cores.max": "",
-    "zeppelin.dep.additionalRemoteRepository": 
"spark-packages,http://dl.bintray.com/spark-packages/maven,false;";,
-    "zeppelin.spark.importImplicit": "true",
-    "zeppelin.spark.sql.stacktrace": "false",
-    "zeppelin.spark.concurrentSQL": "false",
-    "zeppelin.spark.useHiveContext": "true",
-    "zeppelin.pyspark.python": "python",
-    "zeppelin.dep.localrepo": "local-repo",
-    "zeppelin.R.knitr": "true",
-    "zeppelin.spark.maxResult": "1000",
-    "master": "local[*]",
-    "spark.app.name": "Zeppelin",
-    "zeppelin.R.image.width": "100%",
-    "zeppelin.R.render.options": "out.format \u003d \u0027html\u0027, comment 
\u003d NA, echo \u003d FALSE, results \u003d \u0027asis\u0027, message \u003d 
F, warning \u003d F",
-    "zeppelin.R.cmd": "R"
-  },
-  "status": "READY",
-  "interpreterGroup": [
-    {
-      "name": "spark",
-      "class": "org.apache.zeppelin.spark.SparkInterpreter",
-      "defaultInterpreter": true
-    },
-    {
-      "name": "sql",
-      "class": "org.apache.zeppelin.spark.SparkSqlInterpreter",
-      "defaultInterpreter": false
-    },
-    {
-      "name": "dep",
-      "class": "org.apache.zeppelin.spark.DepInterpreter",
-      "defaultInterpreter": false
-    },
-    {
-      "name": "pyspark",
-      "class": "org.apache.zeppelin.spark.PySparkInterpreter",
-      "defaultInterpreter": false
-    },
-    {
-      "name": "r",
-      "class": "org.apache.zeppelin.spark.SparkRInterpreter",
-      "defaultInterpreter": false
-    }
-  ],
-  "dependencies": [],
-  "option": {
-    "remote": true,
-    "port": -1,
-    "perNoteSession": false,
-    "perNoteProcess": false,
-    "isExistingProcess": false,
-    "setPermission": false
-  }
-}
-'''
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/status_params.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/status_params.py
deleted file mode 100644
index 35360c6..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/scripts/status_params.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management.libraries.script import Script
-
-config = Script.get_config()
-
-zeppelin_pid_dir = config['configurations']['zeppelin-env']['zeppelin_pid_dir']
-zeppelin_user = config['configurations']['zeppelin-env']['zeppelin_user']
-zeppelin_group = config['configurations']['zeppelin-env']['zeppelin_group']
-zeppelin_log_dir = config['configurations']['zeppelin-env']['zeppelin_log_dir']

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/templates/input.config-zeppelin.json.j2
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/templates/input.config-zeppelin.json.j2
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/templates/input.config-zeppelin.json.j2
deleted file mode 100644
index 2b373d5..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/package/templates/input.config-zeppelin.json.j2
+++ /dev/null
@@ -1,48 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-  "input":[
-    {
-      "type":"zeppelin",
-      "rowtype":"service",
-      "path":"{{default('/configurations/zeppelin-env/zeppelin_log_dir', 
'/var/log/zeppelin')}}/zeppelin-zeppelin-*.log"
-    }
-  ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "zeppelin"
-          ]
-        }
-      },
-      "log4j_format":"",
-      
"multiline_pattern":"^(%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\])",
-      
"message_pattern":"(?m)^%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}\\(\\{{"{"}}%{DATA:thread_name}\\{{"}"}}%{SPACE}%{JAVAFILE:file}\\[%{JAVAMETHOD:method}\\]:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-        }
-      }
-    }
-  ]
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/quicklinks/quicklinks.json
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/quicklinks/quicklinks.json
deleted file mode 100644
index c1d8491..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/quicklinks/quicklinks.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
-  "name": "default",
-  "description": "default quick links configuration",
-  "configuration": {
-    "protocol":
-    {
-      "type":"https",
-      "checks":[
-        {
-          "property":"zeppelin.ssl",
-          "desired":"true",
-          "site":"zeppelin-config"
-        }
-      ]
-    },
-
-    "links": [
-      {
-        "name": "zeppelin_ui",
-        "label": "Zeppelin UI",
-        "requires_user_name": "false",
-        "component_name": "ZEPPELIN_MASTER",
-        "url":"%@://%@:%@/",
-        "port":{
-          "http_property": "zeppelin.server.port",
-          "http_default_port": "9995",
-          "https_property": "zeppelin.server.port",
-          "https_default_port": "9995",
-          "regex": "^(\\d+)$",
-          "site": "zeppelin-config"
-        }
-      }
-    ]
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/role_command_order.json
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/role_command_order.json
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/role_command_order.json
deleted file mode 100644
index 3b7d2d0..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/role_command_order.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "general_deps" : {
-    "_comment" : "dependencies for ZEPPELIN",
-    "ZEPPELIN_MASTER-START" : ["NAMENODE-START"],
-    "ZEPPELIN_SERVICE_CHECK-SERVICE_CHECK" : ["ZEPPELIN_MASTER-START"]
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/service_advisor.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/service_advisor.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/service_advisor.py
deleted file mode 100644
index 4548961..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/service_advisor.py
+++ /dev/null
@@ -1,209 +0,0 @@
-#!/usr/bin/env ambari-python-wrap
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-# Python imports
-import imp
-import os
-import traceback
-import re
-import socket
-import fnmatch
-
-
-from resource_management.core.logger import Logger
-
-SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
-STACKS_DIR = os.path.join(SCRIPT_DIR, '../../../stacks/')
-PARENT_FILE = os.path.join(STACKS_DIR, 'service_advisor.py')
-
-try:
-  with open(PARENT_FILE, 'rb') as fp:
-    service_advisor = imp.load_module('service_advisor', fp, PARENT_FILE, 
('.py', 'rb', imp.PY_SOURCE))
-except Exception as e:
-  traceback.print_exc()
-  print "Failed to load parent"
-
-class ZeppelinServiceAdvisor(service_advisor.ServiceAdvisor):
-
-  def __init__(self, *args, **kwargs):
-    self.as_super = super(ZeppelinServiceAdvisor, self)
-    self.as_super.__init__(*args, **kwargs)
-
-    # Always call these methods
-    self.modifyMastersWithMultipleInstances()
-    self.modifyCardinalitiesDict()
-    self.modifyHeapSizeProperties()
-    self.modifyNotValuableComponents()
-    self.modifyComponentsNotPreferableOnServer()
-    self.modifyComponentLayoutSchemes()
-
-  def modifyMastersWithMultipleInstances(self):
-    """
-    Modify the set of masters with multiple instances.
-    Must be overriden in child class.
-    """
-    # Nothing to do
-    pass
-
-  def modifyCardinalitiesDict(self):
-    """
-    Modify the dictionary of cardinalities.
-    Must be overriden in child class.
-    """
-    # Nothing to do
-    pass
-
-  def modifyHeapSizeProperties(self):
-    """
-    Modify the dictionary of heap size properties.
-    Must be overriden in child class.
-    """
-    pass
-
-  def modifyNotValuableComponents(self):
-    """
-    Modify the set of components whose host assignment is based on other 
services.
-    Must be overriden in child class.
-    """
-    # Nothing to do
-    pass
-
-  def modifyComponentsNotPreferableOnServer(self):
-    """
-    Modify the set of components that are not preferable on the server.
-    Must be overriden in child class.
-    """
-    # Nothing to do
-    pass
-
-  def modifyComponentLayoutSchemes(self):
-    """
-    Modify layout scheme dictionaries for components.
-    The scheme dictionary basically maps the number of hosts to
-    host index where component should exist.
-    Must be overriden in child class.
-    """
-    # Nothing to do
-    pass
-
-  def getServiceComponentLayoutValidations(self, services, hosts):
-    """
-    Get a list of errors.
-    Must be overriden in child class.
-    """
-
-    return []
-
-  def getServiceConfigurationRecommendations(self, configurations, 
clusterData, services, hosts):
-    """
-    Entry point.
-    Must be overriden in child class.
-    """
-    #Logger.info("Class: %s, Method: %s. Recommending Service Configurations." 
%
-    #            (self.__class__.__name__, inspect.stack()[0][3]))
-
-    recommender = ZeppelinRecommender()
-    recommender.recommendZeppelinConfigurationsFromHDP25(configurations, 
clusterData, services, hosts)
-
-  def getServiceConfigurationsValidationItems(self, configurations, 
recommendedDefaults, services, hosts):
-    """
-    Entry point.
-    Validate configurations for the service. Return a list of errors.
-    The code for this function should be the same for each Service Advisor.
-    """
-    #Logger.info("Class: %s, Method: %s. Validating Configurations." %
-    #            (self.__class__.__name__, inspect.stack()[0][3]))
-
-    validator = ZeppelinValidator()
-    # Calls the methods of the validator using arguments,
-    # method(siteProperties, siteRecommendations, configurations, services, 
hosts)
-    return validator.validateListOfConfigUsingMethod(configurations, 
recommendedDefaults, services, hosts, validator.validators)
-
-
-
-class ZeppelinRecommender(service_advisor.ServiceAdvisor):
-  """
-  Zeppelin Recommender suggests properties when adding the service for the 
first time or modifying configs via the UI.
-  """
-
-  def __init__(self, *args, **kwargs):
-    self.as_super = super(ZeppelinRecommender, self)
-    self.as_super.__init__(*args, **kwargs)
-
-  def recommendZeppelinConfigurationsFromHDP25(self, configurations, 
clusterData, services, hosts):
-    """
-    :type configurations dict
-    :type clusterData dict
-    :type services dict
-    :type hosts dict
-    """
-    self.__recommendLivySuperUsers(configurations, services)
-
-  def __recommendLivySuperUsers(self, configurations, services):
-    """
-    If Kerberos is enabled AND Zeppelin is installed and Spark Livy Server is 
installed, then set
-    livy-conf/livy.superusers to contain the Zeppelin principal name from
-    zeppelin-env/zeppelin.server.kerberos.principal
-
-    :param configurations:
-    :param services:
-    """
-    if self.isSecurityEnabled(services):
-      zeppelin_env = self.getServicesSiteProperties(services, "zeppelin-env")
-
-      if zeppelin_env and 'zeppelin.server.kerberos.principal' in zeppelin_env:
-        zeppelin_principal = zeppelin_env['zeppelin.server.kerberos.principal']
-        zeppelin_user = zeppelin_principal.split('@')[0] if zeppelin_principal 
else None
-
-        if zeppelin_user:
-          livy_conf = self.getServicesSiteProperties(services, 'livy-conf')
-
-          if livy_conf:
-            superusers = livy_conf['livy.superusers'] if livy_conf and 
'livy.superusers' in livy_conf else None
-
-            # add the Zeppelin user to the set of users
-            if superusers:
-              _superusers = superusers.split(',')
-              _superusers = [x.strip() for x in _superusers]
-              _superusers = filter(None, _superusers)  # Removes empty string 
elements from array
-            else:
-              _superusers = []
-
-            if zeppelin_user not in _superusers:
-              _superusers.append(zeppelin_user)
-
-              putLivyProperty = self.putProperty(configurations, 'livy-conf', 
services)
-              putLivyProperty('livy.superusers', ','.join(_superusers))
-
-class ZeppelinValidator(service_advisor.ServiceAdvisor):
-  """
-  Zeppelin Validator checks the correctness of properties whenever the service 
is first added or the user attempts to
-  change configs via the UI.
-  """
-
-  def __init__(self, *args, **kwargs):
-    self.as_super = super(ZeppelinValidator, self)
-    self.as_super.__init__(*args, **kwargs)
-
-    self.validators = []
-
-
-
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/alerts.json
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/alerts.json 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/alerts.json
new file mode 100644
index 0000000..4b62236
--- /dev/null
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/alerts.json
@@ -0,0 +1,18 @@
+{
+  "ZEPPELIN": {
+    "service": [],
+    "ZEPPELIN_MASTER": [
+      {
+        "name": "zeppelin_server_status",
+        "label": "Zeppelin Server Status",
+        "description": "This host-level alert is triggered if the Zeppelin 
server cannot be determined to be up and responding to client requests.",
+        "interval": 1,
+        "scope": "ANY",
+        "source": {
+          "type": "SCRIPT",
+          "path": "ZEPPELIN/0.6.0/package/scripts/alert_check_zeppelin.py"
+        }
+      }
+    ]
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-config.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-config.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-config.xml
new file mode 100644
index 0000000..bd6ad76
--- /dev/null
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-config.xml
@@ -0,0 +1,208 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+  <!-- contents of actual zeppelin-site.xml -->
+  <property>
+    <name>zeppelin.server.addr</name>
+    <value>0.0.0.0</value>
+    <description>Server address</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.server.port</name>
+    <value>9995</value>
+    <description>Server port.The subsequent port (e.g. 9996) should also be 
open as it will be
+            used by the web socket
+        </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.server.ssl.port</name>
+    <value>9995</value>
+    <description>Server ssl port. (used when ssl property is set to true)
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.notebook.dir</name>
+    <value>notebook</value>
+    <description>notebook persist</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.notebook.homescreen</name>
+    <value> </value>
+    <description>id of notebook to be displayed in homescreen. e.g.) 2A94M5J1Z 
Empty value
+            displays default home screen
+        </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.notebook.homescreen.hide</name>
+    <value>false</value>
+    <description>hide homescreen notebook from list when this value set to 
true</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.notebook.s3.user</name>
+    <value>user</value>
+    <description>user name for s3 folder structure. If S3 is used to store the 
notebooks, it is
+            necessary to use the following folder structure 
bucketname/username/notebook/
+        </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.notebook.s3.bucket</name>
+    <value>zeppelin</value>
+    <description>bucket name for notebook storage. If S3 is used to store the 
notebooks, it is
+            necessary to use the following folder structure 
bucketname/username/notebook/
+        </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.notebook.storage</name>
+    <value>org.apache.zeppelin.notebook.repo.HdfsNotebookRepo</value>
+    <description>notebook persistence layer implementation. If S3 is used, set 
this to
+            org.apache.zeppelin.notebook.repo.S3NotebookRepo instead. If S3 is 
used to store the
+            notebooks, it is necessary to use the following folder structure
+            bucketname/username/notebook/
+        </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.interpreter.dir</name>
+    <value>interpreter</value>
+    <description>Interpreter implementation base directory</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.interpreters</name>
+    
<value>org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.livy.LivySparkInterpreter,org.apache.zeppelin.livy.LivyPySparkInterpreter,org.apache.zeppelin.livy.LivySparkRInterpreter,org.apache.zeppelin.livy.LivySparkSQLInterpreter</value>
+    <description>Comma separated interpreter configurations. First interpreter 
become a
+            default
+        </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.interpreter.group.order</name>
+    <value>spark,angular,jdbc,livy,md,sh</value>
+    <description>Comma separated interpreter configurations. First interpreter 
become default
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.interpreter.connect.timeout</name>
+    <value>30000</value>
+    <description>Interpreter process connect timeout in msec.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.ssl</name>
+    <value>false</value>
+    <description>Should SSL be used by the servers?</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.ssl.client.auth</name>
+    <value>false</value>
+    <description>Should client authentication be used for SSL 
connections?</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.ssl.keystore.path</name>
+    <value>conf/keystore</value>
+    <description>Path to keystore relative to Zeppelin home</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.ssl.keystore.type</name>
+    <value>JKS</value>
+    <description>The format of the given keystore (e.g. JKS or 
PKCS12)</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.ssl.keystore.password</name>
+    <value>change me</value>
+    <description>Keystore password. Can be obfuscated by the Jetty Password 
tool</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.ssl.key.manager.password</name>
+    <value>change me</value>
+    <description>Key Manager password. Defaults to keystore password. Can be 
obfuscated.
+        </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.ssl.truststore.path</name>
+    <value>conf/truststore</value>
+    <description>Path to truststore relative to Zeppelin home. Defaults to the 
keystore path
+        </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.ssl.truststore.type</name>
+    <value>JKS</value>
+    <description>The format of the given truststore (e.g. JKS or PKCS12). 
Defaults to the same
+            type as the keystore type
+        </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.ssl.truststore.password</name>
+    <value>change me</value>
+    <description>Truststore password. Can be obfuscated by the Jetty Password 
tool. Defaults to
+            the keystore password
+        </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.server.allowed.origins</name>
+    <value>*</value>
+    <description>Allowed sources for REST and WebSocket requests (i.e.
+            http://onehost:8080,http://otherhost.com). If you leave * you are 
vulnerable to
+            https://issues.apache.org/jira/browse/ZEPPELIN-173
+        </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.anonymous.allowed</name>
+    <value>false</value>
+    <description>Anonymous user allowed by default</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.notebook.public</name>
+    <value>false</value>
+    <description>Make notebook public by default when created, private 
otherwise</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.websocket.max.text.message.size</name>
+    <value>1024000</value>
+    <description>Size in characters of the maximum text message to be received 
by websocket. Defaults to 1024000</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.interpreter.config.upgrade</name>
+    <value>true</value>
+    <description>If this is set to true, on every restart of Zeppelin server 
default interpreter parameters will be reset</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-env.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-env.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-env.xml
new file mode 100644
index 0000000..fb3b512
--- /dev/null
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-env.xml
@@ -0,0 +1,184 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>zeppelin_pid_dir</name>
+    <value>/var/run/zeppelin</value>
+    <description>Dir containing process ID file</description>
+    <value-attributes>
+      <type>directory</type>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin_user</name>
+    <value>zeppelin</value>
+    <property-type>USER</property-type>
+    <description>User zeppelin daemon runs as</description>
+    <value-attributes>
+      <type>user</type>
+      <overridable>false</overridable>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin_group</name>
+    <value>zeppelin</value>
+    <property-type>GROUP</property-type>
+    <description>zeppelin group</description>
+    <value-attributes>
+      <type>user</type>
+      <overridable>false</overridable>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin_log_dir</name>
+    <value>/var/log/zeppelin</value>
+    <description>Zeppelin Log dir</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin_env_content</name>
+    <description>This is the jinja template for zeppelin-env.sh 
file</description>
+    <value>
+# export JAVA_HOME=
+export JAVA_HOME={{java64_home}}
+# export MASTER=                              # Spark master url. eg. 
spark://master_addr:7077. Leave empty if you want to use local mode.
+export MASTER=yarn-client
+export SPARK_YARN_JAR={{spark_jar}}
+# export ZEPPELIN_JAVA_OPTS                   # Additional jvm options. for 
example, export ZEPPELIN_JAVA_OPTS="-Dspark.executor.memory=8g 
-Dspark.cores.max=16"
+# export ZEPPELIN_MEM                         # Zeppelin jvm mem options 
Default -Xms1024m -Xmx1024m -XX:MaxPermSize=512m
+# export ZEPPELIN_INTP_MEM                    # zeppelin interpreter process 
jvm mem options. Default -Xms1024m -Xmx1024m -XX:MaxPermSize=512m
+# export ZEPPELIN_INTP_JAVA_OPTS              # zeppelin interpreter process 
jvm options.
+# export ZEPPELIN_SSL_PORT                    # ssl port (used when ssl 
environment variable is set to true)
+
+# export ZEPPELIN_LOG_DIR                     # Where log files are stored.  
PWD by default.
+export ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}
+# export ZEPPELIN_PID_DIR                     # The pid files are stored. 
${ZEPPELIN_HOME}/run by default.
+export ZEPPELIN_PID_DIR={{zeppelin_pid_dir}}
+# export ZEPPELIN_WAR_TEMPDIR                 # The location of jetty 
temporary directory.
+# export ZEPPELIN_NOTEBOOK_DIR                # Where notebook saved
+# export ZEPPELIN_NOTEBOOK_HOMESCREEN         # Id of notebook to be displayed 
in homescreen. ex) 2A94M5J1Z
+# export ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE    # hide homescreen notebook from 
list when this value set to "true". default "false"
+# export ZEPPELIN_NOTEBOOK_S3_BUCKET          # Bucket where notebook saved
+# export ZEPPELIN_NOTEBOOK_S3_ENDPOINT        # Endpoint of the bucket
+# export ZEPPELIN_NOTEBOOK_S3_USER            # User in bucket where notebook 
saved. For example bucket/user/notebook/2A94M5J1Z/note.json
+# export ZEPPELIN_IDENT_STRING                # A string representing this 
instance of zeppelin. $USER by default.
+# export ZEPPELIN_NICENESS                    # The scheduling priority for 
daemons. Defaults to 0.
+# export ZEPPELIN_INTERPRETER_LOCALREPO       # Local repository for 
interpreter's additional dependency loading
+# export ZEPPELIN_NOTEBOOK_STORAGE            # Refers to pluggable notebook 
storage class, can have two classes simultaneously with a sync between them 
(e.g. local and remote).
+# export ZEPPELIN_NOTEBOOK_ONE_WAY_SYNC       # If there are multiple notebook 
storages, should we treat the first one as the only source of truth?
+# export ZEPPELIN_NOTEBOOK_PUBLIC             # Make notebook public by 
default when created, private otherwise
+export ZEPPELIN_INTP_CLASSPATH_OVERRIDES="{{external_dependency_conf}}"
+#### Spark interpreter configuration ####
+
+## Kerberos ticket refresh setting
+##
+export KINIT_FAIL_THRESHOLD=5
+export KERBEROS_REFRESH_INTERVAL=1d
+
+## Use provided spark installation ##
+## defining SPARK_HOME makes Zeppelin run spark interpreter process using 
spark-submit
+##
+# export SPARK_HOME                           # (required) When it is defined, 
load it instead of Zeppelin embedded Spark libraries
+# export SPARK_HOME={{spark_home}}
+# export SPARK_SUBMIT_OPTIONS                 # (optional) extra options to 
pass to spark submit. eg) "--driver-memory 512M --executor-memory 1G".
+# export SPARK_APP_NAME                       # (optional) The name of spark 
application.
+
+## Use embedded spark binaries ##
+## without SPARK_HOME defined, Zeppelin still able to run spark interpreter 
process using embedded spark binaries.
+## however, it is not encouraged when you can define SPARK_HOME
+##
+# Options read in YARN client mode
+# export HADOOP_CONF_DIR                      # yarn-site.xml is located in 
configuration directory in HADOOP_CONF_DIR.
+export HADOOP_CONF_DIR=/etc/hadoop/conf
+# Pyspark (supported with Spark 1.2.1 and above)
+# To configure pyspark, you need to set spark distribution's path to 
'spark.home' property in Interpreter setting screen in Zeppelin GUI
+# export PYSPARK_PYTHON                       # path to the python command. 
must be the same path on the driver(Zeppelin) and all workers.
+# export PYTHONPATH
+
+## Spark interpreter options ##
+##
+# export ZEPPELIN_SPARK_USEHIVECONTEXT        # Use HiveContext instead of 
SQLContext if set true. true by default.
+# export ZEPPELIN_SPARK_CONCURRENTSQL         # Execute multiple SQL 
concurrently if set true. false by default.
+# export ZEPPELIN_SPARK_IMPORTIMPLICIT        # Import implicits, UDF 
collection, and sql if set true. true by default.
+# export ZEPPELIN_SPARK_MAXRESULT             # Max number of Spark SQL result 
to display. 1000 by default.
+# export ZEPPELIN_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE       # Size in characters 
of the maximum text message to be received by websocket. Defaults to 1024000
+
+
+#### HBase interpreter configuration ####
+
+## To connect to HBase running on a cluster, either HBASE_HOME or 
HBASE_CONF_DIR must be set
+
+# export HBASE_HOME=                          # (require) Under which HBase 
scripts and configuration should be
+# export HBASE_CONF_DIR=                      # (optional) Alternatively, 
configuration directory can be set to point to the directory that has 
hbase-site.xml
+
+# export ZEPPELIN_IMPERSONATE_CMD             # Optional, when user want to 
run interpreter as end web user. eg) 'sudo -H -u ${ZEPPELIN_IMPERSONATE_USER} 
bash -c '
+
+    </value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.executor.mem</name>
+    <value>512m</value>
+    <description>Executor memory to use (e.g. 512m or 1g)</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.executor.instances</name>
+    <value>2</value>
+    <description>Number of executor instances to use (e.g. 2)</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.spark.jar.dir</name>
+    <value>/apps/zeppelin</value>
+    <description>Shared location where zeppelin spark jar will be copied to. 
Should be accesible
+      by all cluster nodes
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+
+  <property>
+    <name>zeppelin.server.kerberos.principal</name>
+    <value/>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <description>
+      Kerberos principal name for the Zeppelin.
+    </description>
+    <property-type>KERBEROS_PRINCIPAL</property-type>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>zeppelin.server.kerberos.keytab</name>
+    <value/>
+    <value-attributes>
+      <empty-value-valid>true</empty-value-valid>
+    </value-attributes>
+    <description>
+      Location of the kerberos keytab file for the Zeppelin.
+    </description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-log4j-properties.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-log4j-properties.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-log4j-properties.xml
new file mode 100644
index 0000000..bf50947
--- /dev/null
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-log4j-properties.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+<property>
+    <name>log4j_properties_content</name>
+    <description>This is the content for log4j.properties file</description>
+    <value>
+log4j.rootLogger = INFO, dailyfile
+log4j.appender.stdout = org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout = org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m%n
+log4j.appender.dailyfile.DatePattern=.yyyy-MM-dd
+log4j.appender.dailyfile.Threshold = INFO
+log4j.appender.dailyfile = org.apache.log4j.DailyRollingFileAppender
+log4j.appender.dailyfile.File = ${zeppelin.log.file}
+log4j.appender.dailyfile.layout = org.apache.log4j.PatternLayout
+log4j.appender.dailyfile.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - 
%m%n
+    </value>
+    <on-ambari-upgrade add="true"/>
+</property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-shiro-ini.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-shiro-ini.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-shiro-ini.xml
new file mode 100644
index 0000000..b46d9ff
--- /dev/null
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/configuration/zeppelin-shiro-ini.xml
@@ -0,0 +1,97 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>shiro_ini_content</name>
+    <description>This is the jinja template for shiro.ini file</description>
+      <value>
+[users]
+# List of users with their password allowed to access Zeppelin.
+# To use a different strategy (LDAP / Database / ...) check the shiro doc at 
http://shiro.apache.org/configuration.html#Configuration-INISections
+admin = admin, admin
+user1 = user1, role1, role2
+user2 = user2, role3
+user3 = user3, role2
+
+# Sample LDAP configuration, for user Authentication, currently tested for 
single Realm
+[main]
+### A sample for configuring Active Directory Realm
+#activeDirectoryRealm = org.apache.zeppelin.realm.ActiveDirectoryGroupRealm
+#activeDirectoryRealm.systemUsername = userNameA
+
+#use either systemPassword or hadoopSecurityCredentialPath, more details in 
http://zeppelin.apache.org/docs/latest/security/shiroauthentication.html
+#activeDirectoryRealm.systemPassword = passwordA
+#activeDirectoryRealm.hadoopSecurityCredentialPath = 
jceks://file/user/zeppelin/zeppelin.jceks
+#activeDirectoryRealm.searchBase = CN=Users,DC=SOME_GROUP,DC=COMPANY,DC=COM
+#activeDirectoryRealm.url = ldap://ldap.test.com:389
+#activeDirectoryRealm.groupRolesMap = 
"CN=admin,OU=groups,DC=SOME_GROUP,DC=COMPANY,DC=COM":"admin","CN=finance,OU=groups,DC=SOME_GROUP,DC=COMPANY,DC=COM":"finance","CN=hr,OU=groups,DC=SOME_GROUP,DC=COMPANY,DC=COM":"hr"
+#activeDirectoryRealm.authorizationCachingEnabled = false
+
+### A sample for configuring LDAP Directory Realm
+#ldapRealm = org.apache.zeppelin.realm.LdapGroupRealm
+## search base for ldap groups (only relevant for LdapGroupRealm):
+#ldapRealm.contextFactory.environment[ldap.searchBase] = dc=COMPANY,dc=COM
+#ldapRealm.contextFactory.url = ldap://ldap.test.com:389
+#ldapRealm.userDnTemplate = uid={0},ou=Users,dc=COMPANY,dc=COM
+#ldapRealm.contextFactory.authenticationMechanism = SIMPLE
+
+### A sample PAM configuration
+#pamRealm=org.apache.zeppelin.realm.PamRealm
+#pamRealm.service=sshd
+
+
+sessionManager = org.apache.shiro.web.session.mgt.DefaultWebSessionManager
+### If caching of user is required then uncomment below lines
+cacheManager = org.apache.shiro.cache.MemoryConstrainedCacheManager
+securityManager.cacheManager = $cacheManager
+
+cookie = org.apache.shiro.web.servlet.SimpleCookie
+cookie.name = JSESSIONID
+#Uncomment the line below when running Zeppelin-Server in HTTPS mode
+#cookie.secure = true
+cookie.httpOnly = true
+sessionManager.sessionIdCookie = $cookie
+
+securityManager.sessionManager = $sessionManager
+# 86,400,000 milliseconds = 24 hour
+securityManager.sessionManager.globalSessionTimeout = 86400000
+shiro.loginUrl = /api/login
+
+[roles]
+role1 = *
+role2 = *
+role3 = *
+admin = *
+
+[urls]
+# This section is used for url-based security.
+# You can secure interpreter, configuration and credential information by 
urls. Comment or uncomment the below urls that you want to hide.
+# anon means the access is anonymous.
+# authc means Form based Auth Security
+# To enfore security, comment the line below and uncomment the next one
+/api/version = anon
+#/api/interpreter/** = authc, roles[admin]
+#/api/configurations/** = authc, roles[admin]
+#/api/credential/** = authc, roles[admin]
+#/** = anon
+/** = authc
+      </value>
+    <on-ambari-upgrade add="true"/>
+  </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/kerberos.json
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/kerberos.json 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/kerberos.json
new file mode 100644
index 0000000..b605c9d
--- /dev/null
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/kerberos.json
@@ -0,0 +1,51 @@
+{
+  "services": [
+    {
+      "name": "ZEPPELIN",
+      "identities": [
+        {
+          "name": "/smokeuser"
+        },
+        {
+          "name": "zeppelin_user",
+          "principal": {
+            "value": 
"${zeppelin-env/zeppelin_user}${principal_suffix}@${realm}",
+            "type" : "user",
+            "configuration": "zeppelin-env/zeppelin.server.kerberos.principal",
+            "local_username" : "${zeppelin-env/zeppelin_user}"
+          },
+          "keytab": {
+            "file": "${keytab_dir}/zeppelin.server.kerberos.keytab",
+            "owner": {
+              "name": "${zeppelin-env/zeppelin_user}",
+              "access": "r"
+            },
+            "group": {
+              "name": "${cluster-env/user_group}",
+              "access": ""
+            },
+            "configuration": "zeppelin-env/zeppelin.server.kerberos.keytab"
+          }
+        }
+      ],
+      "components": [
+        {
+          "name": "ZEPPELIN_MASTER"
+        }
+      ],
+      "configurations": [
+        {
+          "zeppelin-env": {
+            "zeppelin.kerberos.enabled": "true"
+          }
+        },
+        {
+          "core-site": {
+            "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.groups": "*",
+            "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.hosts": "*"
+          }
+        }
+      ]
+    }
+  ]
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/metainfo.xml 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/metainfo.xml
new file mode 100644
index 0000000..597b6db
--- /dev/null
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/metainfo.xml
@@ -0,0 +1,103 @@
+<?xml version="1.0"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>ZEPPELIN</name>
+      <displayName>Zeppelin Notebook</displayName>
+      <comment>A web-based notebook that enables interactive data analytics. 
It enables you to
+        make beautiful data-driven, interactive and collaborative documents 
with SQL, Scala
+        and more.
+      </comment>
+      <version>0.6.0</version>
+      <components>
+        <component>
+          <name>ZEPPELIN_MASTER</name>
+          <displayName>Zeppelin Notebook</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <versionAdvertised>true</versionAdvertised>
+          <commandScript>
+            <script>scripts/master.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>10000</timeout>
+          </commandScript>
+          <dependencies>
+            <dependency>
+              <name>SPARK/SPARK_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>YARN/YARN_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+          <logs>
+            <log>
+              <logId>zeppelin</logId>
+              <primary>true</primary>
+            </log>
+          </logs>
+        </component>
+      </components>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>zeppelin</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
+      <requiredServices>
+        <service>HDFS</service>
+      </requiredServices>
+
+      <configuration-dependencies>
+        <config-type>zeppelin-config</config-type>
+        <config-type>zeppelin-env</config-type>
+        <config-type>zeppelin-shiro-ini</config-type>
+        <config-type>zeppelin-log4j-properties</config-type>
+      </configuration-dependencies>
+      <restartRequiredAfterChange>true</restartRequiredAfterChange>
+
+      <quickLinksConfigurations>
+        <quickLinksConfiguration>
+          <fileName>quicklinks.json</fileName>
+          <default>true</default>
+        </quickLinksConfiguration>
+      </quickLinksConfigurations>
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/alert_check_zeppelin.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/alert_check_zeppelin.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/alert_check_zeppelin.py
new file mode 100644
index 0000000..e6d7a91
--- /dev/null
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/alert_check_zeppelin.py
@@ -0,0 +1,47 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import glob
+import sys
+
+from resource_management.core.exceptions import ComponentIsNotRunning
+from resource_management.libraries.functions.check_process_status import check_process_status
+from resource_management.libraries.script import Script
+
+# NOTE(review): reload(sys) + setdefaultencoding is a Python 2-only idiom;
+# both are unavailable in Python 3. Presumably forces UTF-8 for config values
+# read below — TODO confirm before any Python 3 migration.
+reload(sys)
+sys.setdefaultencoding('utf8')
+# Cluster configuration is fetched once at module import time; the alert
+# framework imports this module, so a missing config raises at import.
+config = Script.get_config()
+
+# Directory holding the Zeppelin daemon pid file, taken from zeppelin-env.
+zeppelin_pid_dir = config['configurations']['zeppelin-env']['zeppelin_pid_dir']
+
+# Result codes understood by the Ambari alert framework.
+RESULT_CODE_OK = 'OK'
+RESULT_CODE_CRITICAL = 'CRITICAL'
+RESULT_CODE_UNKNOWN = 'UNKNOWN'
+
+
def execute(configurations=None, parameters=None, host_name=None):
  """
  Ambari alert callback: report whether the Zeppelin daemon is running.

  Locates the Zeppelin pid file under ``zeppelin_pid_dir`` (its name embeds
  user/host, hence the glob) and checks the recorded process is alive.

  :param configurations: alert configurations dict (unused here)
  :param parameters: alert parameters dict (unused here)
  :param host_name: host the alert runs against (unused here)
  :return: tuple of (result_code, [description]) as required by the
           Ambari alert framework.
  """
  # Defaults changed from mutable `{}` to None: the shared-dict pitfall,
  # and the values are never read in this function anyway.
  try:
    pid_files = glob.glob(zeppelin_pid_dir + '/zeppelin-*.pid')
    if not pid_files:
      # No pid file at all: previously this surfaced as an IndexError
      # caught by a bare `except:`; handle it explicitly instead.
      return (RESULT_CODE_CRITICAL, ["Zeppelin is not running"])
    check_process_status(pid_files[0])
  except ComponentIsNotRunning as ex:
    return (RESULT_CODE_CRITICAL, [str(ex)])
  except Exception:
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
    # still propagate; any other failure means the check itself failed.
    return (RESULT_CODE_CRITICAL, ["Zeppelin is not running"])

  return (RESULT_CODE_OK, ["Successful connection to Zeppelin"])

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/interpreter_json_template.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/interpreter_json_template.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/interpreter_json_template.py
new file mode 100644
index 0000000..6a98919
--- /dev/null
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/interpreter_json_template.py
@@ -0,0 +1,361 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+template = '''
+{
+  "interpreterSettings": {
+    "2CKEKWY8Z": {
+      "id": "2CKEKWY8Z",
+      "name": "angular",
+      "group": "angular",
+      "properties": {},
+      "status": "READY",
+      "interpreterGroup": [
+        {
+          "name": "angular",
+          "class": "org.apache.zeppelin.angular.AngularInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "editOnDblClick": true
+          }
+        }
+      ],
+      "dependencies": [],
+      "option": {
+        "remote": true,
+        "port": -1,
+        "perNote": "shared",
+        "perUser": "shared",
+        "isExistingProcess": false,
+        "setPermission": false,
+        "users": [],
+        "isUserImpersonate": false
+      }
+    },
+    "2CKX8WPU1": {
+      "id": "2CKX8WPU1",
+      "name": "spark",
+      "group": "spark",
+      "properties": {
+        "spark.executor.memory": "512m",
+        "args": "",
+        "zeppelin.spark.printREPLOutput": "true",
+        "spark.cores.max": "",
+        "zeppelin.dep.additionalRemoteRepository": "spark-packages,http://dl.bintray.com/spark-packages/maven,false;",
+        "zeppelin.spark.sql.stacktrace": "false",
+        "zeppelin.spark.importImplicit": "true",
+        "zeppelin.spark.concurrentSQL": "false",
+        "zeppelin.spark.useHiveContext": "true",
+        "zeppelin.pyspark.python": "python",
+        "zeppelin.dep.localrepo": "local-repo",
+        "zeppelin.R.knitr": "true",
+        "zeppelin.spark.maxResult": "1000",
+        "master": "yarn-client",
+        "spark.app.name": "Zeppelin",
+        "zeppelin.R.image.width": "100%",
+        "zeppelin.R.render.options": "out.format \u003d \u0027html\u0027, comment \u003d NA, echo \u003d FALSE, results \u003d \u0027asis\u0027, message \u003d F, warning \u003d F",
+        "zeppelin.R.cmd": "R"
+      },
+      "status": "READY",
+      "interpreterGroup": [
+        {
+          "name": "spark",
+          "class": "org.apache.zeppelin.spark.SparkInterpreter",
+          "defaultInterpreter": true,
+          "editor": {
+            "language": "scala"
+          }
+        },
+        {
+          "name": "sql",
+          "class": "org.apache.zeppelin.spark.SparkSqlInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "sql"
+          }
+        },
+        {
+          "name": "dep",
+          "class": "org.apache.zeppelin.spark.DepInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "scala"
+          }
+        },
+        {
+          "name": "pyspark",
+          "class": "org.apache.zeppelin.spark.PySparkInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "python"
+          }
+        },
+        {
+          "name": "r",
+          "class": "org.apache.zeppelin.spark.SparkRInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "r"
+          }
+        }
+      ],
+      "dependencies": [],
+      "option": {
+        "remote": true,
+        "port": -1,
+        "perNote": "shared",
+        "perUser": "shared",
+        "isExistingProcess": false,
+        "setPermission": false,
+        "users": [],
+        "isUserImpersonate": false
+      }
+    },
+    "2CK8A9MEG": {
+      "id": "2CK8A9MEG",
+      "name": "jdbc",
+      "group": "jdbc",
+      "properties": {
+        "default.password": "",
+        "zeppelin.jdbc.auth.type": "",
+        "common.max_count": "1000",
+        "zeppelin.jdbc.principal": "",
+        "default.user": "gpadmin",
+        "default.url": "jdbc:postgresql://localhost:5432/",
+        "default.driver": "org.postgresql.Driver",
+        "zeppelin.jdbc.keytab.location": "",
+        "zeppelin.jdbc.concurrent.use": "true",
+        "zeppelin.jdbc.concurrent.max_connection": "10"
+      },
+      "status": "READY",
+      "interpreterGroup": [
+        {
+          "name": "sql",
+          "class": "org.apache.zeppelin.jdbc.JDBCInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "sql",
+            "editOnDblClick": false
+          }
+        }
+      ],
+      "dependencies": [],
+      "option": {
+        "remote": true,
+        "port": -1,
+        "perNote": "shared",
+        "perUser": "shared",
+        "isExistingProcess": false,
+        "setPermission": false,
+        "users": [],
+        "isUserImpersonate": false
+      }
+    },
+    "2CKX6DGQZ": {
+      "id": "2CKX6DGQZ",
+      "name": "livy",
+      "group": "livy",
+      "properties": {
+        "zeppelin.livy.pull_status.interval.millis": "1000",
+        "livy.spark.executor.memory": "",
+        "zeppelin.livy.session.create_timeout": "120",
+        "zeppelin.livy.principal": "",
+        "zeppelin.livy.spark.sql.maxResult": "1000",
+        "zeppelin.livy.keytab": "",
+        "zeppelin.livy.concurrentSQL": "false",
+        "zeppelin.livy.spark.sql.field.truncate": "true",
+        "livy.spark.executor.cores": "",
+        "zeppelin.livy.displayAppInfo": "false",
+        "zeppelin.livy.url": "http://localhost:8998",
+        "livy.spark.dynamicAllocation.minExecutors": "",
+        "livy.spark.driver.cores": "",
+        "livy.spark.jars.packages": "",
+        "livy.spark.dynamicAllocation.enabled": "",
+        "livy.spark.executor.instances": "",
+        "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "",
+        "livy.spark.dynamicAllocation.maxExecutors": "",
+        "livy.spark.dynamicAllocation.initialExecutors": "",
+        "livy.spark.driver.memory": ""
+      },
+      "status": "READY",
+      "interpreterGroup": [
+        {
+          "name": "spark",
+          "class": "org.apache.zeppelin.livy.LivySparkInterpreter",
+          "defaultInterpreter": true,
+          "editor": {
+            "language": "scala",
+            "editOnDblClick": false
+          }
+        },
+        {
+          "name": "sql",
+          "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "sql",
+            "editOnDblClick": false
+          }
+        },
+        {
+          "name": "pyspark",
+          "class": "org.apache.zeppelin.livy.LivyPySparkInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "python",
+            "editOnDblClick": false
+          }
+        },
+        {
+          "name": "pyspark3",
+          "class": "org.apache.zeppelin.livy.LivyPySpark3Interpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "python",
+            "editOnDblClick": false
+          }
+        },
+        {
+          "name": "sparkr",
+          "class": "org.apache.zeppelin.livy.LivySparkRInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "r",
+            "editOnDblClick": false
+          }
+        }
+      ],
+      "dependencies": [],
+      "option": {
+        "remote": true,
+        "port": -1,
+        "perNote": "shared",
+        "perUser": "scoped",
+        "isExistingProcess": false,
+        "setPermission": false,
+        "users": [],
+        "isUserImpersonate": false
+      }
+    },
+    "2CKAY1A8Y": {
+      "id": "2CKAY1A8Y",
+      "name": "md",
+      "group": "md",
+      "properties": {
+        "markdown.parser.type": "pegdown"
+      },
+      "status": "READY",
+      "interpreterGroup": [
+        {
+          "name": "md",
+          "class": "org.apache.zeppelin.markdown.Markdown",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "markdown",
+            "editOnDblClick": true
+          }
+        }
+      ],
+      "dependencies": [],
+      "option": {
+        "remote": true,
+        "port": -1,
+        "perNote": "shared",
+        "perUser": "shared",
+        "isExistingProcess": false,
+        "setPermission": false,
+        "users": [],
+        "isUserImpersonate": false
+      }
+    },
+    "2CHS8UYQQ": {
+      "id": "2CHS8UYQQ",
+      "name": "sh",
+      "group": "sh",
+      "properties": {
+        "zeppelin.shell.keytab.location": "",
+        "shell.command.timeout.millisecs": "60000",
+        "zeppelin.shell.principal": "",
+        "zeppelin.shell.auth.type": ""
+      },
+      "status": "READY",
+      "interpreterGroup": [
+        {
+          "name": "sh",
+          "class": "org.apache.zeppelin.shell.ShellInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "sh",
+            "editOnDblClick": false
+          }
+        }
+      ],
+      "dependencies": [],
+      "option": {
+        "remote": true,
+        "port": -1,
+        "perNote": "shared",
+        "perUser": "shared",
+        "isExistingProcess": false,
+        "setPermission": false,
+        "users": [],
+        "isUserImpersonate": false
+      }
+    }
+  },
+  "interpreterBindings": {},
+  "interpreterRepositories": [
+    {
+      "id": "central",
+      "type": "default",
+      "url": "http://repo1.maven.org/maven2/",
+      "releasePolicy": {
+        "enabled": true,
+        "updatePolicy": "daily",
+        "checksumPolicy": "warn"
+      },
+      "snapshotPolicy": {
+        "enabled": true,
+        "updatePolicy": "daily",
+        "checksumPolicy": "warn"
+      },
+      "mirroredRepositories": [],
+      "repositoryManager": false
+    },
+    {
+      "id": "local",
+      "type": "default",
+      "url": "file:///home/zeppelin/.m2/repository",
+      "releasePolicy": {
+        "enabled": true,
+        "updatePolicy": "daily",
+        "checksumPolicy": "warn"
+      },
+      "snapshotPolicy": {
+        "enabled": true,
+        "updatePolicy": "daily",
+        "checksumPolicy": "warn"
+      },
+      "mirroredRepositories": [],
+      "repositoryManager": false
+    }
+  ]
+}
+'''

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/livy2_config_template.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/livy2_config_template.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/livy2_config_template.py
new file mode 100644
index 0000000..71d3817
--- /dev/null
+++ 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/livy2_config_template.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+template = '''
+{
+  "id": "2C8A4SZ9T_livy2",
+  "status": "READY",
+  "group": "livy",
+  "name": "livy2",
+  "properties": {
+    "zeppelin.livy.keytab": "",
+    "zeppelin.livy.spark.sql.maxResult": "1000",
+    "livy.spark.executor.instances": "",
+    "livy.spark.executor.memory": "",
+    "livy.spark.dynamicAllocation.enabled": "",
+    "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "",
+    "livy.spark.dynamicAllocation.initialExecutors": "",
+    "zeppelin.livy.session.create_timeout": "120",
+    "livy.spark.driver.memory": "",
+    "zeppelin.livy.displayAppInfo": "false",
+    "livy.spark.jars.packages": "",
+    "livy.spark.dynamicAllocation.maxExecutors": "",
+    "zeppelin.livy.concurrentSQL": "false",
+    "zeppelin.livy.principal": "",
+    "livy.spark.executor.cores": "",
+    "zeppelin.livy.url": "http://localhost:8998",
+    "zeppelin.livy.pull_status.interval.millis": "1000",
+    "livy.spark.driver.cores": "",
+    "livy.spark.dynamicAllocation.minExecutors": ""
+  },
+  "interpreterGroup": [
+    {
+      "class": "org.apache.zeppelin.livy.LivySparkInterpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "scala"
+      },
+      "name": "spark",
+      "defaultInterpreter": false
+    },
+    {
+      "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "sql"
+      },
+      "name": "sql",
+      "defaultInterpreter": false
+    },
+    {
+      "class": "org.apache.zeppelin.livy.LivyPySparkInterpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "python"
+      },
+      "name": "pyspark",
+      "defaultInterpreter": false
+    },
+    {
+      "class": "org.apache.zeppelin.livy.LivyPySpark3Interpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "python"
+      },
+      "name": "pyspark3",
+      "defaultInterpreter": false
+    },
+    {
+      "class": "org.apache.zeppelin.livy.LivySparkRInterpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "r"
+      },
+      "name": "sparkr",
+      "defaultInterpreter": false
+    }
+  ],
+  "dependencies": [],
+  "option": {
+    "setPermission": false,
+    "remote": true,
+    "users": [],
+    "isExistingProcess": false,
+    "perUser": "scoped",
+    "isUserImpersonate": false,
+    "perNote": "shared",
+    "port": -1
+  }
+}
+'''

Reply via email to