AMBARI-21901. Add 0.7.x stack definition for Zeppelin (Prabhjyot Singh via 
Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/78afc58d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/78afc58d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/78afc58d

Branch: refs/heads/branch-2.6
Commit: 78afc58daaec7dc8c0e747a0bd4204d485146444
Parents: 395cf3b
Author: Venkata Sairam <venkatasairam.la...@gmail.com>
Authored: Mon Sep 18 12:28:14 2017 +0530
Committer: Venkata Sairam <venkatasairam.la...@gmail.com>
Committed: Mon Sep 18 12:28:14 2017 +0530

----------------------------------------------------------------------
 .../ZEPPELIN/0.6.0.2.5/alerts.json              |  18 -
 .../0.6.0.2.5/configuration/zeppelin-config.xml | 208 --------
 .../0.6.0.2.5/configuration/zeppelin-env.xml    | 185 -------
 .../configuration/zeppelin-log4j-properties.xml |  37 --
 .../configuration/zeppelin-logsearch-conf.xml   |  80 ---
 .../configuration/zeppelin-shiro-ini.xml        |  97 ----
 .../ZEPPELIN/0.6.0.2.5/kerberos.json            |  51 --
 .../ZEPPELIN/0.6.0.2.5/metainfo.xml             | 103 ----
 .../package/scripts/alert_check_zeppelin.py     |  47 --
 .../scripts/interpreter_json_template.py        | 361 -------------
 .../package/scripts/livy2_config_template.py    | 107 ----
 .../0.6.0.2.5/package/scripts/master.py         | 522 -------------------
 .../0.6.0.2.5/package/scripts/params.py         | 258 ---------
 .../0.6.0.2.5/package/scripts/service_check.py  |  40 --
 .../package/scripts/spark2_config_template.py   |  84 ---
 .../0.6.0.2.5/package/scripts/status_params.py  |  29 --
 .../0.6.0.2.5/quicklinks/quicklinks.json        |  35 --
 .../common-services/ZEPPELIN/0.6.0/alerts.json  |  18 +
 .../0.6.0/configuration/zeppelin-config.xml     | 208 ++++++++
 .../0.6.0/configuration/zeppelin-env.xml        | 185 +++++++
 .../configuration/zeppelin-log4j-properties.xml |  37 ++
 .../configuration/zeppelin-logsearch-conf.xml   |  80 +++
 .../0.6.0/configuration/zeppelin-shiro-ini.xml  |  97 ++++
 .../ZEPPELIN/0.6.0/kerberos.json                |  51 ++
 .../common-services/ZEPPELIN/0.6.0/metainfo.xml | 103 ++++
 .../package/scripts/alert_check_zeppelin.py     |  47 ++
 .../scripts/interpreter_json_template.py        | 361 +++++++++++++
 .../package/scripts/livy2_config_template.py    | 107 ++++
 .../ZEPPELIN/0.6.0/package/scripts/master.py    | 522 +++++++++++++++++++
 .../ZEPPELIN/0.6.0/package/scripts/params.py    | 258 +++++++++
 .../0.6.0/package/scripts/service_check.py      |  40 ++
 .../package/scripts/spark2_config_template.py   |  84 +++
 .../0.6.0/package/scripts/status_params.py      |  29 ++
 .../ZEPPELIN/0.6.0/quicklinks/quicklinks.json   |  35 ++
 .../common-services/ZEPPELIN/0.7.0/alerts.json  |  18 +
 .../0.7.0/configuration/zeppelin-config.xml     | 208 ++++++++
 .../0.7.0/configuration/zeppelin-env.xml        | 185 +++++++
 .../configuration/zeppelin-log4j-properties.xml |  37 ++
 .../configuration/zeppelin-logsearch-conf.xml   |  80 +++
 .../0.7.0/configuration/zeppelin-shiro-ini.xml  |  97 ++++
 .../ZEPPELIN/0.7.0/kerberos.json                |  51 ++
 .../common-services/ZEPPELIN/0.7.0/metainfo.xml | 103 ++++
 .../package/scripts/alert_check_zeppelin.py     |  47 ++
 .../scripts/interpreter_json_template.py        | 361 +++++++++++++
 .../package/scripts/livy2_config_template.py    | 107 ++++
 .../ZEPPELIN/0.7.0/package/scripts/master.py    | 522 +++++++++++++++++++
 .../ZEPPELIN/0.7.0/package/scripts/params.py    | 258 +++++++++
 .../0.7.0/package/scripts/service_check.py      |  40 ++
 .../package/scripts/spark2_config_template.py   |  84 +++
 .../0.7.0/package/scripts/status_params.py      |  29 ++
 .../ZEPPELIN/0.7.0/quicklinks/quicklinks.json   |  35 ++
 .../HDP/2.5/services/ZEPPELIN/metainfo.xml      |   4 +-
 .../HDP/2.6/services/ZEPPELIN/metainfo.xml      |  21 +-
 53 files changed, 4546 insertions(+), 2265 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/78afc58d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/alerts.json
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/alerts.json
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/alerts.json
deleted file mode 100644
index 8e9b6e7..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/alerts.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-  "ZEPPELIN": {
-    "service": [],
-    "ZEPPELIN_MASTER": [
-      {
-        "name": "zeppelin_server_status",
-        "label": "Zeppelin Server Status",
-        "description": "This host-level alert is triggered if the Zeppelin 
server cannot be determined to be up and responding to client requests.",
-        "interval": 1,
-        "scope": "ANY",
-        "source": {
-          "type": "SCRIPT",
-          "path": "ZEPPELIN/0.6.0.2.5/package/scripts/alert_check_zeppelin.py"
-        }
-      }
-    ]
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/78afc58d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-config.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-config.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-config.xml
deleted file mode 100644
index bd6ad76..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-config.xml
+++ /dev/null
@@ -1,208 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<configuration>
-  <!-- contents of actual zeppelin-site.xml -->
-  <property>
-    <name>zeppelin.server.addr</name>
-    <value>0.0.0.0</value>
-    <description>Server address</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.server.port</name>
-    <value>9995</value>
-    <description>Server port.The subsequent port (e.g. 9996) should also be 
open as it will be
-            used by the web socket
-        </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.server.ssl.port</name>
-    <value>9995</value>
-    <description>Server ssl port. (used when ssl property is set to true)
-    </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.notebook.dir</name>
-    <value>notebook</value>
-    <description>notebook persist</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.notebook.homescreen</name>
-    <value> </value>
-    <description>id of notebook to be displayed in homescreen. e.g.) 2A94M5J1Z 
Empty value
-            displays default home screen
-        </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.notebook.homescreen.hide</name>
-    <value>false</value>
-    <description>hide homescreen notebook from list when this value set to 
true</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.notebook.s3.user</name>
-    <value>user</value>
-    <description>user name for s3 folder structure. If S3 is used to store the 
notebooks, it is
-            necessary to use the following folder structure 
bucketname/username/notebook/
-        </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.notebook.s3.bucket</name>
-    <value>zeppelin</value>
-    <description>bucket name for notebook storage. If S3 is used to store the 
notebooks, it is
-            necessary to use the following folder structure 
bucketname/username/notebook/
-        </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.notebook.storage</name>
-    <value>org.apache.zeppelin.notebook.repo.HdfsNotebookRepo</value>
-    <description>notebook persistence layer implementation. If S3 is used, set 
this to
-            org.apache.zeppelin.notebook.repo.S3NotebookRepo instead. If S3 is 
used to store the
-            notebooks, it is necessary to use the following folder structure
-            bucketname/username/notebook/
-        </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.interpreter.dir</name>
-    <value>interpreter</value>
-    <description>Interpreter implementation base directory</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.interpreters</name>
-    
<value>org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.livy.LivySparkInterpreter,org.apache.zeppelin.livy.LivyPySparkInterpreter,org.apache.zeppelin.livy.LivySparkRInterpreter,org.apache.zeppelin.livy.LivySparkSQLInterpreter</value>
-    <description>Comma separated interpreter configurations. First interpreter 
become a
-            default
-        </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.interpreter.group.order</name>
-    <value>spark,angular,jdbc,livy,md,sh</value>
-    <description>Comma separated interpreter configurations. First interpreter 
become default
-    </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.interpreter.connect.timeout</name>
-    <value>30000</value>
-    <description>Interpreter process connect timeout in msec.</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.ssl</name>
-    <value>false</value>
-    <description>Should SSL be used by the servers?</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.ssl.client.auth</name>
-    <value>false</value>
-    <description>Should client authentication be used for SSL 
connections?</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.ssl.keystore.path</name>
-    <value>conf/keystore</value>
-    <description>Path to keystore relative to Zeppelin home</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.ssl.keystore.type</name>
-    <value>JKS</value>
-    <description>The format of the given keystore (e.g. JKS or 
PKCS12)</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.ssl.keystore.password</name>
-    <value>change me</value>
-    <description>Keystore password. Can be obfuscated by the Jetty Password 
tool</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.ssl.key.manager.password</name>
-    <value>change me</value>
-    <description>Key Manager password. Defaults to keystore password. Can be 
obfuscated.
-        </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.ssl.truststore.path</name>
-    <value>conf/truststore</value>
-    <description>Path to truststore relative to Zeppelin home. Defaults to the 
keystore path
-        </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.ssl.truststore.type</name>
-    <value>JKS</value>
-    <description>The format of the given truststore (e.g. JKS or PKCS12). 
Defaults to the same
-            type as the keystore type
-        </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.ssl.truststore.password</name>
-    <value>change me</value>
-    <description>Truststore password. Can be obfuscated by the Jetty Password 
tool. Defaults to
-            the keystore password
-        </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.server.allowed.origins</name>
-    <value>*</value>
-    <description>Allowed sources for REST and WebSocket requests (i.e.
-            http://onehost:8080,http://otherhost.com). If you leave * you are 
vulnerable to
-            https://issues.apache.org/jira/browse/ZEPPELIN-173
-        </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.anonymous.allowed</name>
-    <value>false</value>
-    <description>Anonymous user allowed by default</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.notebook.public</name>
-    <value>false</value>
-    <description>Make notebook public by default when created, private 
otherwise</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.websocket.max.text.message.size</name>
-    <value>1024000</value>
-    <description>Size in characters of the maximum text message to be received 
by websocket. Defaults to 1024000</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.interpreter.config.upgrade</name>
-    <value>true</value>
-    <description>If this is set to true, on every restart of Zeppelin server 
default interpreter parameters will be reset</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78afc58d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml
deleted file mode 100644
index 7f0d9e4..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-env.xml
+++ /dev/null
@@ -1,185 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<configuration>
-  <property>
-    <name>zeppelin_pid_dir</name>
-    <value>/var/run/zeppelin</value>
-    <description>Dir containing process ID file</description>
-    <value-attributes>
-      <type>directory</type>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin_user</name>
-    <value>zeppelin</value>
-    <property-type>USER</property-type>
-    <description>User zeppelin daemon runs as</description>
-    <value-attributes>
-      <type>user</type>
-      <overridable>false</overridable>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin_group</name>
-    <value>zeppelin</value>
-    <property-type>GROUP</property-type>
-    <description>zeppelin group</description>
-    <value-attributes>
-      <type>user</type>
-      <overridable>false</overridable>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin_log_dir</name>
-    <value>/var/log/zeppelin</value>
-    <description>Zeppelin Log dir</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin_env_content</name>
-    <description>This is the jinja template for zeppelin-env.sh 
file</description>
-    <value>
-# export JAVA_HOME=
-export JAVA_HOME={{java64_home}}
-# export MASTER=                              # Spark master url. eg. 
spark://master_addr:7077. Leave empty if you want to use local mode.
-export MASTER=yarn-client
-export SPARK_YARN_JAR={{spark_jar}}
-# export ZEPPELIN_JAVA_OPTS                   # Additional jvm options. for 
example, export ZEPPELIN_JAVA_OPTS="-Dspark.executor.memory=8g 
-Dspark.cores.max=16"
-# export ZEPPELIN_MEM                         # Zeppelin jvm mem options 
Default -Xms1024m -Xmx1024m -XX:MaxPermSize=512m
-# export ZEPPELIN_INTP_MEM                    # zeppelin interpreter process 
jvm mem options. Default -Xms1024m -Xmx1024m -XX:MaxPermSize=512m
-# export ZEPPELIN_INTP_JAVA_OPTS              # zeppelin interpreter process 
jvm options.
-# export ZEPPELIN_SSL_PORT                    # ssl port (used when ssl 
environment variable is set to true)
-
-# export ZEPPELIN_LOG_DIR                     # Where log files are stored.  
PWD by default.
-export ZEPPELIN_LOG_DIR={{zeppelin_log_dir}}
-# export ZEPPELIN_PID_DIR                     # The pid files are stored. 
${ZEPPELIN_HOME}/run by default.
-export ZEPPELIN_PID_DIR={{zeppelin_pid_dir}}
-# export ZEPPELIN_WAR_TEMPDIR                 # The location of jetty 
temporary directory.
-# export ZEPPELIN_NOTEBOOK_DIR                # Where notebook saved
-# export ZEPPELIN_NOTEBOOK_HOMESCREEN         # Id of notebook to be displayed 
in homescreen. ex) 2A94M5J1Z
-# export ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE    # hide homescreen notebook from 
list when this value set to "true". default "false"
-# export ZEPPELIN_NOTEBOOK_S3_BUCKET          # Bucket where notebook saved
-# export ZEPPELIN_NOTEBOOK_S3_ENDPOINT        # Endpoint of the bucket
-# export ZEPPELIN_NOTEBOOK_S3_USER            # User in bucket where notebook 
saved. For example bucket/user/notebook/2A94M5J1Z/note.json
-# export ZEPPELIN_IDENT_STRING                # A string representing this 
instance of zeppelin. $USER by default.
-# export ZEPPELIN_NICENESS                    # The scheduling priority for 
daemons. Defaults to 0.
-# export ZEPPELIN_INTERPRETER_LOCALREPO       # Local repository for 
interpreter's additional dependency loading
-# export ZEPPELIN_NOTEBOOK_STORAGE            # Refers to pluggable notebook 
storage class, can have two classes simultaneously with a sync between them 
(e.g. local and remote).
-# export ZEPPELIN_NOTEBOOK_ONE_WAY_SYNC       # If there are multiple notebook 
storages, should we treat the first one as the only source of truth?
-# export ZEPPELIN_NOTEBOOK_PUBLIC             # Make notebook public by 
default when created, private otherwise
-export ZEPPELIN_INTP_CLASSPATH_OVERRIDES="{{external_dependency_conf}}"
-#### Spark interpreter configuration ####
-
-## Kerberos ticket refresh setting
-##
-export KINIT_FAIL_THRESHOLD=5
-export KERBEROS_REFRESH_INTERVAL=1d
-
-## Use provided spark installation ##
-## defining SPARK_HOME makes Zeppelin run spark interpreter process using 
spark-submit
-##
-# export SPARK_HOME                           # (required) When it is defined, 
load it instead of Zeppelin embedded Spark libraries
-# export SPARK_HOME={{spark_home}}
-# export SPARK_SUBMIT_OPTIONS                 # (optional) extra options to 
pass to spark submit. eg) "--driver-memory 512M --executor-memory 1G".
-# export SPARK_APP_NAME                       # (optional) The name of spark 
application.
-
-## Use embedded spark binaries ##
-## without SPARK_HOME defined, Zeppelin still able to run spark interpreter 
process using embedded spark binaries.
-## however, it is not encouraged when you can define SPARK_HOME
-##
-# Options read in YARN client mode
-# export HADOOP_CONF_DIR                      # yarn-site.xml is located in 
configuration directory in HADOOP_CONF_DIR.
-export HADOOP_CONF_DIR=/etc/hadoop/conf
-# Pyspark (supported with Spark 1.2.1 and above)
-# To configure pyspark, you need to set spark distribution's path to 
'spark.home' property in Interpreter setting screen in Zeppelin GUI
-# export PYSPARK_PYTHON                       # path to the python command. 
must be the same path on the driver(Zeppelin) and all workers.
-# export PYTHONPATH
-
-
-## Spark interpreter options ##
-##
-# export ZEPPELIN_SPARK_USEHIVECONTEXT        # Use HiveContext instead of 
SQLContext if set true. true by default.
-# export ZEPPELIN_SPARK_CONCURRENTSQL         # Execute multiple SQL 
concurrently if set true. false by default.
-# export ZEPPELIN_SPARK_IMPORTIMPLICIT        # Import implicits, UDF 
collection, and sql if set true. true by default.
-# export ZEPPELIN_SPARK_MAXRESULT             # Max number of Spark SQL result 
to display. 1000 by default.
-# export ZEPPELIN_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE       # Size in characters 
of the maximum text message to be received by websocket. Defaults to 1024000
-
-
-#### HBase interpreter configuration ####
-
-## To connect to HBase running on a cluster, either HBASE_HOME or 
HBASE_CONF_DIR must be set
-
-# export HBASE_HOME=                          # (require) Under which HBase 
scripts and configuration should be
-# export HBASE_CONF_DIR=                      # (optional) Alternatively, 
configuration directory can be set to point to the directory that has 
hbase-site.xml
-
-# export ZEPPELIN_IMPERSONATE_CMD             # Optional, when user want to 
run interpreter as end web user. eg) 'sudo -H -u ${ZEPPELIN_IMPERSONATE_USER} 
bash -c '
-
-    </value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.executor.mem</name>
-    <value>512m</value>
-    <description>Executor memory to use (e.g. 512m or 1g)</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.executor.instances</name>
-    <value>2</value>
-    <description>Number of executor instances to use (e.g. 2)</description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.spark.jar.dir</name>
-    <value>/apps/zeppelin</value>
-    <description>Shared location where zeppelin spark jar will be copied to. 
Should be accesible
-      by all cluster nodes
-    </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-
-  <property>
-    <name>zeppelin.server.kerberos.principal</name>
-    <value/>
-    <value-attributes>
-      <empty-value-valid>true</empty-value-valid>
-    </value-attributes>
-    <description>
-      Kerberos principal name for the Zeppelin.
-    </description>
-    <property-type>KERBEROS_PRINCIPAL</property-type>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>zeppelin.server.kerberos.keytab</name>
-    <value/>
-    <value-attributes>
-      <empty-value-valid>true</empty-value-valid>
-    </value-attributes>
-    <description>
-      Location of the kerberos keytab file for the Zeppelin.
-    </description>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78afc58d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-log4j-properties.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-log4j-properties.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-log4j-properties.xml
deleted file mode 100644
index bf50947..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-log4j-properties.xml
+++ /dev/null
@@ -1,37 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<configuration>
-<property>
-    <name>log4j_properties_content</name>
-    <description>This is the content for log4j.properties file</description>
-    <value>
-log4j.rootLogger = INFO, dailyfile
-log4j.appender.stdout = org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout = org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - %m%n
-log4j.appender.dailyfile.DatePattern=.yyyy-MM-dd
-log4j.appender.dailyfile.Threshold = INFO
-log4j.appender.dailyfile = org.apache.log4j.DailyRollingFileAppender
-log4j.appender.dailyfile.File = ${zeppelin.log.file}
-log4j.appender.dailyfile.layout = org.apache.log4j.PatternLayout
-log4j.appender.dailyfile.layout.ConversionPattern=%5p [%d] ({%t} %F[%M]:%L) - 
%m%n
-    </value>
-    <on-ambari-upgrade add="true"/>
-</property>
-</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/78afc58d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-logsearch-conf.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-logsearch-conf.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-logsearch-conf.xml
deleted file mode 100644
index 0b8ab7a..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-logsearch-conf.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--->
-<configuration supports_final="false" supports_adding_forbidden="true">
-  <property>
-    <name>service_name</name>
-    <display-name>Service name</display-name>
-    <description>Service name for Logsearch Portal (label)</description>
-    <value>Zeppelin</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>component_mappings</name>
-    <display-name>Component mapping</display-name>
-    <description>Logsearch component logid mapping list (e.g.: 
COMPONENT1:logid1,logid2;COMPONENT2:logid3)</description>
-    <value>ZEPPELIN_MASTER:zeppelin</value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-  <property>
-    <name>content</name>
-    <display-name>Logfeeder Config</display-name>
-    <description>Metadata jinja template for Logfeeder which contains grok 
patterns for reading service specific logs.</description>
-    <value>
-{
-  "input":[
-    {
-      "type":"zeppelin",
-      "rowtype":"service",
-      "path":"{{default('/configurations/zeppelin-env/zeppelin_log_dir', 
'/var/log/zeppelin')}}/zeppelin-zeppelin-*.log"
-    }
-   ],
-  "filter":[
-    {
-      "filter":"grok",
-      "conditions":{
-        "fields":{
-          "type":[
-            "zeppelin"
-          ]
-         }
-       },
-      "log4j_format":"",
-      
"multiline_pattern":"^(%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\])",
-      
"message_pattern":"(?m)^%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}\\(\\{{"{"}}%{DATA:thread_name}\\{{"}"}}%{SPACE}%{JAVAFILE:file}\\[%{JAVAMETHOD:method}\\]:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-      "post_map_values":{
-        "logtime":{
-          "map_date":{
-            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-          }
-         }
-       }
-     }
-   ]
- }
-    </value>
-    <value-attributes>
-      <type>content</type>
-      <show-property-name>false</show-property-name>
-    </value-attributes>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78afc58d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-shiro-ini.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-shiro-ini.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-shiro-ini.xml
deleted file mode 100644
index b46d9ff..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/configuration/zeppelin-shiro-ini.xml
+++ /dev/null
@@ -1,97 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<configuration>
-  <property>
-    <name>shiro_ini_content</name>
-    <description>This is the jinja template for shiro.ini file</description>
-      <value>
-[users]
-# List of users with their password allowed to access Zeppelin.
-# To use a different strategy (LDAP / Database / ...) check the shiro doc at 
http://shiro.apache.org/configuration.html#Configuration-INISections
-admin = admin, admin
-user1 = user1, role1, role2
-user2 = user2, role3
-user3 = user3, role2
-
-# Sample LDAP configuration, for user Authentication, currently tested for 
single Realm
-[main]
-### A sample for configuring Active Directory Realm
-#activeDirectoryRealm = org.apache.zeppelin.realm.ActiveDirectoryGroupRealm
-#activeDirectoryRealm.systemUsername = userNameA
-
-#use either systemPassword or hadoopSecurityCredentialPath, more details in 
http://zeppelin.apache.org/docs/latest/security/shiroauthentication.html
-#activeDirectoryRealm.systemPassword = passwordA
-#activeDirectoryRealm.hadoopSecurityCredentialPath = 
jceks://file/user/zeppelin/zeppelin.jceks
-#activeDirectoryRealm.searchBase = CN=Users,DC=SOME_GROUP,DC=COMPANY,DC=COM
-#activeDirectoryRealm.url = ldap://ldap.test.com:389
-#activeDirectoryRealm.groupRolesMap = 
"CN=admin,OU=groups,DC=SOME_GROUP,DC=COMPANY,DC=COM":"admin","CN=finance,OU=groups,DC=SOME_GROUP,DC=COMPANY,DC=COM":"finance","CN=hr,OU=groups,DC=SOME_GROUP,DC=COMPANY,DC=COM":"hr"
-#activeDirectoryRealm.authorizationCachingEnabled = false
-
-### A sample for configuring LDAP Directory Realm
-#ldapRealm = org.apache.zeppelin.realm.LdapGroupRealm
-## search base for ldap groups (only relevant for LdapGroupRealm):
-#ldapRealm.contextFactory.environment[ldap.searchBase] = dc=COMPANY,dc=COM
-#ldapRealm.contextFactory.url = ldap://ldap.test.com:389
-#ldapRealm.userDnTemplate = uid={0},ou=Users,dc=COMPANY,dc=COM
-#ldapRealm.contextFactory.authenticationMechanism = SIMPLE
-
-### A sample PAM configuration
-#pamRealm=org.apache.zeppelin.realm.PamRealm
-#pamRealm.service=sshd
-
-
-sessionManager = org.apache.shiro.web.session.mgt.DefaultWebSessionManager
-### If caching of user is required then uncomment below lines
-cacheManager = org.apache.shiro.cache.MemoryConstrainedCacheManager
-securityManager.cacheManager = $cacheManager
-
-cookie = org.apache.shiro.web.servlet.SimpleCookie
-cookie.name = JSESSIONID
-#Uncomment the line below when running Zeppelin-Server in HTTPS mode
-#cookie.secure = true
-cookie.httpOnly = true
-sessionManager.sessionIdCookie = $cookie
-
-securityManager.sessionManager = $sessionManager
-# 86,400,000 milliseconds = 24 hour
-securityManager.sessionManager.globalSessionTimeout = 86400000
-shiro.loginUrl = /api/login
-
-[roles]
-role1 = *
-role2 = *
-role3 = *
-admin = *
-
-[urls]
-# This section is used for url-based security.
-# You can secure interpreter, configuration and credential information by 
urls. Comment or uncomment the below urls that you want to hide.
-# anon means the access is anonymous.
-# authc means Form based Auth Security
-# To enfore security, comment the line below and uncomment the next one
-/api/version = anon
-#/api/interpreter/** = authc, roles[admin]
-#/api/configurations/** = authc, roles[admin]
-#/api/credential/** = authc, roles[admin]
-#/** = anon
-/** = authc
-      </value>
-    <on-ambari-upgrade add="true"/>
-  </property>
-</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/78afc58d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/kerberos.json
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/kerberos.json
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/kerberos.json
deleted file mode 100644
index b605c9d..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/kerberos.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "services": [
-    {
-      "name": "ZEPPELIN",
-      "identities": [
-        {
-          "name": "/smokeuser"
-        },
-        {
-          "name": "zeppelin_user",
-          "principal": {
-            "value": 
"${zeppelin-env/zeppelin_user}${principal_suffix}@${realm}",
-            "type" : "user",
-            "configuration": "zeppelin-env/zeppelin.server.kerberos.principal",
-            "local_username" : "${zeppelin-env/zeppelin_user}"
-          },
-          "keytab": {
-            "file": "${keytab_dir}/zeppelin.server.kerberos.keytab",
-            "owner": {
-              "name": "${zeppelin-env/zeppelin_user}",
-              "access": "r"
-            },
-            "group": {
-              "name": "${cluster-env/user_group}",
-              "access": ""
-            },
-            "configuration": "zeppelin-env/zeppelin.server.kerberos.keytab"
-          }
-        }
-      ],
-      "components": [
-        {
-          "name": "ZEPPELIN_MASTER"
-        }
-      ],
-      "configurations": [
-        {
-          "zeppelin-env": {
-            "zeppelin.kerberos.enabled": "true"
-          }
-        },
-        {
-          "core-site": {
-            "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.groups": "*",
-            "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.hosts": "*"
-          }
-        }
-      ]
-    }
-  ]
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/78afc58d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/metainfo.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/metainfo.xml
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/metainfo.xml
deleted file mode 100644
index e29eae3..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/metainfo.xml
+++ /dev/null
@@ -1,103 +0,0 @@
-<?xml version="1.0"?>
-<!--
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements.  See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>ZEPPELIN</name>
-      <displayName>Zeppelin Notebook</displayName>
-      <comment>A web-based notebook that enables interactive data analytics. 
It enables you to
-        make beautiful data-driven, interactive and collaborative documents 
with SQL, Scala
-        and more.
-      </comment>
-      <version>0.6.0.2.5</version>
-      <components>
-        <component>
-          <name>ZEPPELIN_MASTER</name>
-          <displayName>Zeppelin Notebook</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <versionAdvertised>true</versionAdvertised>
-          <commandScript>
-            <script>scripts/master.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>10000</timeout>
-          </commandScript>
-          <dependencies>
-            <dependency>
-              <name>SPARK/SPARK_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>YARN/YARN_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-          <logs>
-            <log>
-              <logId>zeppelin</logId>
-              <primary>true</primary>
-            </log>
-          </logs>
-        </component>
-      </components>
-
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-          <packages>
-            <package>
-              <name>zeppelin</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-
-      <requiredServices>
-        <service>HDFS</service>
-      </requiredServices>
-
-      <configuration-dependencies>
-        <config-type>zeppelin-config</config-type>
-        <config-type>zeppelin-env</config-type>
-        <config-type>zeppelin-shiro-ini</config-type>
-        <config-type>zeppelin-log4j-properties</config-type>
-      </configuration-dependencies>
-      <restartRequiredAfterChange>true</restartRequiredAfterChange>
-
-      <quickLinksConfigurations>
-        <quickLinksConfiguration>
-          <fileName>quicklinks.json</fileName>
-          <default>true</default>
-        </quickLinksConfiguration>
-      </quickLinksConfigurations>
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/78afc58d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/alert_check_zeppelin.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/alert_check_zeppelin.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/alert_check_zeppelin.py
deleted file mode 100644
index e6d7a91..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/alert_check_zeppelin.py
+++ /dev/null
@@ -1,47 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import glob
-import sys
-
-from resource_management.core.exceptions import ComponentIsNotRunning
-from resource_management.libraries.functions.check_process_status import 
check_process_status
-from resource_management.libraries.script import Script
-
-reload(sys)
-sys.setdefaultencoding('utf8')
-config = Script.get_config()
-
-zeppelin_pid_dir = config['configurations']['zeppelin-env']['zeppelin_pid_dir']
-
-RESULT_CODE_OK = 'OK'
-RESULT_CODE_CRITICAL = 'CRITICAL'
-RESULT_CODE_UNKNOWN = 'UNKNOWN'
-
-
-def execute(configurations={}, parameters={}, host_name=None):
-  try:
-    pid_file = glob.glob(zeppelin_pid_dir + '/zeppelin-*.pid')[0]
-    check_process_status(pid_file)
-  except ComponentIsNotRunning as ex:
-    return (RESULT_CODE_CRITICAL, [str(ex)])
-  except:
-    return (RESULT_CODE_CRITICAL, ["Zeppelin is not running"])
-
-  return (RESULT_CODE_OK, ["Successful connection to Zeppelin"])

http://git-wip-us.apache.org/repos/asf/ambari/blob/78afc58d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/interpreter_json_template.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/interpreter_json_template.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/interpreter_json_template.py
deleted file mode 100644
index 6a98919..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/interpreter_json_template.py
+++ /dev/null
@@ -1,361 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-template = '''
-{
-  "interpreterSettings": {
-    "2CKEKWY8Z": {
-      "id": "2CKEKWY8Z",
-      "name": "angular",
-      "group": "angular",
-      "properties": {},
-      "status": "READY",
-      "interpreterGroup": [
-        {
-          "name": "angular",
-          "class": "org.apache.zeppelin.angular.AngularInterpreter",
-          "defaultInterpreter": false,
-          "editor": {
-            "editOnDblClick": true
-          }
-        }
-      ],
-      "dependencies": [],
-      "option": {
-        "remote": true,
-        "port": -1,
-        "perNote": "shared",
-        "perUser": "shared",
-        "isExistingProcess": false,
-        "setPermission": false,
-        "users": [],
-        "isUserImpersonate": false
-      }
-    },
-    "2CKX8WPU1": {
-      "id": "2CKX8WPU1",
-      "name": "spark",
-      "group": "spark",
-      "properties": {
-        "spark.executor.memory": "512m",
-        "args": "",
-        "zeppelin.spark.printREPLOutput": "true",
-        "spark.cores.max": "",
-        "zeppelin.dep.additionalRemoteRepository": 
"spark-packages,http://dl.bintray.com/spark-packages/maven,false;";,
-        "zeppelin.spark.sql.stacktrace": "false",
-        "zeppelin.spark.importImplicit": "true",
-        "zeppelin.spark.concurrentSQL": "false",
-        "zeppelin.spark.useHiveContext": "true",
-        "zeppelin.pyspark.python": "python",
-        "zeppelin.dep.localrepo": "local-repo",
-        "zeppelin.R.knitr": "true",
-        "zeppelin.spark.maxResult": "1000",
-        "master": "yarn-client",
-        "spark.app.name": "Zeppelin",
-        "zeppelin.R.image.width": "100%",
-        "zeppelin.R.render.options": "out.format \u003d \u0027html\u0027, 
comment \u003d NA, echo \u003d FALSE, results \u003d \u0027asis\u0027, message 
\u003d F, warning \u003d F",
-        "zeppelin.R.cmd": "R"
-      },
-      "status": "READY",
-      "interpreterGroup": [
-        {
-          "name": "spark",
-          "class": "org.apache.zeppelin.spark.SparkInterpreter",
-          "defaultInterpreter": true,
-          "editor": {
-            "language": "scala"
-          }
-        },
-        {
-          "name": "sql",
-          "class": "org.apache.zeppelin.spark.SparkSqlInterpreter",
-          "defaultInterpreter": false,
-          "editor": {
-            "language": "sql"
-          }
-        },
-        {
-          "name": "dep",
-          "class": "org.apache.zeppelin.spark.DepInterpreter",
-          "defaultInterpreter": false,
-          "editor": {
-            "language": "scala"
-          }
-        },
-        {
-          "name": "pyspark",
-          "class": "org.apache.zeppelin.spark.PySparkInterpreter",
-          "defaultInterpreter": false,
-          "editor": {
-            "language": "python"
-          }
-        },
-        {
-          "name": "r",
-          "class": "org.apache.zeppelin.spark.SparkRInterpreter",
-          "defaultInterpreter": false,
-          "editor": {
-            "language": "r"
-          }
-        }
-      ],
-      "dependencies": [],
-      "option": {
-        "remote": true,
-        "port": -1,
-        "perNote": "shared",
-        "perUser": "shared",
-        "isExistingProcess": false,
-        "setPermission": false,
-        "users": [],
-        "isUserImpersonate": false
-      }
-    },
-    "2CK8A9MEG": {
-      "id": "2CK8A9MEG",
-      "name": "jdbc",
-      "group": "jdbc",
-      "properties": {
-        "default.password": "",
-        "zeppelin.jdbc.auth.type": "",
-        "common.max_count": "1000",
-        "zeppelin.jdbc.principal": "",
-        "default.user": "gpadmin",
-        "default.url": "jdbc:postgresql://localhost:5432/",
-        "default.driver": "org.postgresql.Driver",
-        "zeppelin.jdbc.keytab.location": "",
-        "zeppelin.jdbc.concurrent.use": "true",
-        "zeppelin.jdbc.concurrent.max_connection": "10"
-      },
-      "status": "READY",
-      "interpreterGroup": [
-        {
-          "name": "sql",
-          "class": "org.apache.zeppelin.jdbc.JDBCInterpreter",
-          "defaultInterpreter": false,
-          "editor": {
-            "language": "sql",
-            "editOnDblClick": false
-          }
-        }
-      ],
-      "dependencies": [],
-      "option": {
-        "remote": true,
-        "port": -1,
-        "perNote": "shared",
-        "perUser": "shared",
-        "isExistingProcess": false,
-        "setPermission": false,
-        "users": [],
-        "isUserImpersonate": false
-      }
-    },
-    "2CKX6DGQZ": {
-      "id": "2CKX6DGQZ",
-      "name": "livy",
-      "group": "livy",
-      "properties": {
-        "zeppelin.livy.pull_status.interval.millis": "1000",
-        "livy.spark.executor.memory": "",
-        "zeppelin.livy.session.create_timeout": "120",
-        "zeppelin.livy.principal": "",
-        "zeppelin.livy.spark.sql.maxResult": "1000",
-        "zeppelin.livy.keytab": "",
-        "zeppelin.livy.concurrentSQL": "false",
-        "zeppelin.livy.spark.sql.field.truncate": "true",
-        "livy.spark.executor.cores": "",
-        "zeppelin.livy.displayAppInfo": "false",
-        "zeppelin.livy.url": "http://localhost:8998";,
-        "livy.spark.dynamicAllocation.minExecutors": "",
-        "livy.spark.driver.cores": "",
-        "livy.spark.jars.packages": "",
-        "livy.spark.dynamicAllocation.enabled": "",
-        "livy.spark.executor.instances": "",
-        "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "",
-        "livy.spark.dynamicAllocation.maxExecutors": "",
-        "livy.spark.dynamicAllocation.initialExecutors": "",
-        "livy.spark.driver.memory": ""
-      },
-      "status": "READY",
-      "interpreterGroup": [
-        {
-          "name": "spark",
-          "class": "org.apache.zeppelin.livy.LivySparkInterpreter",
-          "defaultInterpreter": true,
-          "editor": {
-            "language": "scala",
-            "editOnDblClick": false
-          }
-        },
-        {
-          "name": "sql",
-          "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter",
-          "defaultInterpreter": false,
-          "editor": {
-            "language": "sql",
-            "editOnDblClick": false
-          }
-        },
-        {
-          "name": "pyspark",
-          "class": "org.apache.zeppelin.livy.LivyPySparkInterpreter",
-          "defaultInterpreter": false,
-          "editor": {
-            "language": "python",
-            "editOnDblClick": false
-          }
-        },
-        {
-          "name": "pyspark3",
-          "class": "org.apache.zeppelin.livy.LivyPySpark3Interpreter",
-          "defaultInterpreter": false,
-          "editor": {
-            "language": "python",
-            "editOnDblClick": false
-          }
-        },
-        {
-          "name": "sparkr",
-          "class": "org.apache.zeppelin.livy.LivySparkRInterpreter",
-          "defaultInterpreter": false,
-          "editor": {
-            "language": "r",
-            "editOnDblClick": false
-          }
-        }
-      ],
-      "dependencies": [],
-      "option": {
-        "remote": true,
-        "port": -1,
-        "perNote": "shared",
-        "perUser": "scoped",
-        "isExistingProcess": false,
-        "setPermission": false,
-        "users": [],
-        "isUserImpersonate": false
-      }
-    },
-    "2CKAY1A8Y": {
-      "id": "2CKAY1A8Y",
-      "name": "md",
-      "group": "md",
-      "properties": {
-        "markdown.parser.type": "pegdown"
-      },
-      "status": "READY",
-      "interpreterGroup": [
-        {
-          "name": "md",
-          "class": "org.apache.zeppelin.markdown.Markdown",
-          "defaultInterpreter": false,
-          "editor": {
-            "language": "markdown",
-            "editOnDblClick": true
-          }
-        }
-      ],
-      "dependencies": [],
-      "option": {
-        "remote": true,
-        "port": -1,
-        "perNote": "shared",
-        "perUser": "shared",
-        "isExistingProcess": false,
-        "setPermission": false,
-        "users": [],
-        "isUserImpersonate": false
-      }
-    },
-    "2CHS8UYQQ": {
-      "id": "2CHS8UYQQ",
-      "name": "sh",
-      "group": "sh",
-      "properties": {
-        "zeppelin.shell.keytab.location": "",
-        "shell.command.timeout.millisecs": "60000",
-        "zeppelin.shell.principal": "",
-        "zeppelin.shell.auth.type": ""
-      },
-      "status": "READY",
-      "interpreterGroup": [
-        {
-          "name": "sh",
-          "class": "org.apache.zeppelin.shell.ShellInterpreter",
-          "defaultInterpreter": false,
-          "editor": {
-            "language": "sh",
-            "editOnDblClick": false
-          }
-        }
-      ],
-      "dependencies": [],
-      "option": {
-        "remote": true,
-        "port": -1,
-        "perNote": "shared",
-        "perUser": "shared",
-        "isExistingProcess": false,
-        "setPermission": false,
-        "users": [],
-        "isUserImpersonate": false
-      }
-    }
-  },
-  "interpreterBindings": {},
-  "interpreterRepositories": [
-    {
-      "id": "central",
-      "type": "default",
-      "url": "http://repo1.maven.org/maven2/";,
-      "releasePolicy": {
-        "enabled": true,
-        "updatePolicy": "daily",
-        "checksumPolicy": "warn"
-      },
-      "snapshotPolicy": {
-        "enabled": true,
-        "updatePolicy": "daily",
-        "checksumPolicy": "warn"
-      },
-      "mirroredRepositories": [],
-      "repositoryManager": false
-    },
-    {
-      "id": "local",
-      "type": "default",
-      "url": "file:///home/zeppelin/.m2/repository",
-      "releasePolicy": {
-        "enabled": true,
-        "updatePolicy": "daily",
-        "checksumPolicy": "warn"
-      },
-      "snapshotPolicy": {
-        "enabled": true,
-        "updatePolicy": "daily",
-        "checksumPolicy": "warn"
-      },
-      "mirroredRepositories": [],
-      "repositoryManager": false
-    }
-  ]
-}
-'''

http://git-wip-us.apache.org/repos/asf/ambari/blob/78afc58d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/livy2_config_template.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/livy2_config_template.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/livy2_config_template.py
deleted file mode 100644
index 71d3817..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/livy2_config_template.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-template = '''
-{
-  "id": "2C8A4SZ9T_livy2",
-  "status": "READY",
-  "group": "livy",
-  "name": "livy2",
-  "properties": {
-    "zeppelin.livy.keytab": "",
-    "zeppelin.livy.spark.sql.maxResult": "1000",
-    "livy.spark.executor.instances": "",
-    "livy.spark.executor.memory": "",
-    "livy.spark.dynamicAllocation.enabled": "",
-    "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "",
-    "livy.spark.dynamicAllocation.initialExecutors": "",
-    "zeppelin.livy.session.create_timeout": "120",
-    "livy.spark.driver.memory": "",
-    "zeppelin.livy.displayAppInfo": "false",
-    "livy.spark.jars.packages": "",
-    "livy.spark.dynamicAllocation.maxExecutors": "",
-    "zeppelin.livy.concurrentSQL": "false",
-    "zeppelin.livy.principal": "",
-    "livy.spark.executor.cores": "",
-    "zeppelin.livy.url": "http://localhost:8998";,
-    "zeppelin.livy.pull_status.interval.millis": "1000",
-    "livy.spark.driver.cores": "",
-    "livy.spark.dynamicAllocation.minExecutors": ""
-  },
-  "interpreterGroup": [
-    {
-      "class": "org.apache.zeppelin.livy.LivySparkInterpreter",
-      "editor": {
-        "editOnDblClick": false,
-        "language": "scala"
-      },
-      "name": "spark",
-      "defaultInterpreter": false
-    },
-    {
-      "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter",
-      "editor": {
-        "editOnDblClick": false,
-        "language": "sql"
-      },
-      "name": "sql",
-      "defaultInterpreter": false
-    },
-    {
-      "class": "org.apache.zeppelin.livy.LivyPySparkInterpreter",
-      "editor": {
-        "editOnDblClick": false,
-        "language": "python"
-      },
-      "name": "pyspark",
-      "defaultInterpreter": false
-              },
-    {
-      "class": "org.apache.zeppelin.livy.LivyPySpark3Interpreter",
-      "editor": {
-        "editOnDblClick": false,
-        "language": "python"
-      },
-      "name": "pyspark3",
-      "defaultInterpreter": false
-    },
-    {
-      "class": "org.apache.zeppelin.livy.LivySparkRInterpreter",
-      "editor": {
-        "editOnDblClick": false,
-        "language": "r"
-      },
-      "name": "sparkr",
-      "defaultInterpreter": false
-    }
-  ],
-  "dependencies": [],
-  "option": {
-    "setPermission": false,
-    "remote": true,
-    "users": [],
-    "isExistingProcess": false,
-    "perUser": "scoped",
-    "isUserImpersonate": false,
-    "perNote": "shared",
-    "port": -1
-  }
-}
-'''

http://git-wip-us.apache.org/repos/asf/ambari/blob/78afc58d/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
 
b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
deleted file mode 100644
index ba73d10..0000000
--- 
a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
+++ /dev/null
@@ -1,522 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import glob
-import os
-
-from resource_management.core import shell, sudo
-from resource_management.core.logger import Logger
-from resource_management.core.resources import Directory
-from resource_management.core.resources.system import Execute, File
-from resource_management.core.source import InlineTemplate
-from resource_management.libraries import XmlConfig
-from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions import get_kinit_path
-from resource_management.libraries.functions import stack_select
-from resource_management.libraries.functions.check_process_status import 
check_process_status
-from resource_management.libraries.functions.default import default
-from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions.stack_features import 
check_stack_feature
-from resource_management.libraries.functions.version import 
format_stack_version
-from resource_management.libraries.script.script import Script
-
-
-class Master(Script):
-  def install(self, env):
-    import params
-    env.set_params(params)
-    self.install_packages(env)
-
-    self.create_zeppelin_log_dir(env)
-
-    if params.spark_version:
-      Execute('echo spark_version:' + str(params.spark_version) + ' detected 
for spark_home: '
-              + params.spark_home + ' >> ' + params.zeppelin_log_file, 
user=params.zeppelin_user)
-    if params.spark2_version:
-      Execute('echo spark2_version:' + str(params.spark2_version) + ' detected 
for spark2_home: '
-              + params.spark2_home + ' >> ' + params.zeppelin_log_file, 
user=params.zeppelin_user)
-
-  def create_zeppelin_dir(self, params):
-    params.HdfsResource(format("/user/{zeppelin_user}"),
-                        type="directory",
-                        action="create_on_execute",
-                        owner=params.zeppelin_user,
-                        recursive_chown=True,
-                        recursive_chmod=True
-                        )
-    params.HdfsResource(format("/user/{zeppelin_user}/test"),
-                        type="directory",
-                        action="create_on_execute",
-                        owner=params.zeppelin_user,
-                        recursive_chown=True,
-                        recursive_chmod=True
-                        )
-    params.HdfsResource(format("/apps/zeppelin"),
-                        type="directory",
-                        action="create_on_execute",
-                        owner=params.zeppelin_user,
-                        recursive_chown=True,
-                        recursive_chmod=True
-                        )
-
-    spark_deps_full_path = self.get_zeppelin_spark_dependencies()[0]
-    spark_dep_file_name = os.path.basename(spark_deps_full_path)
-
-    params.HdfsResource(params.spark_jar_dir + "/" + spark_dep_file_name,
-                        type="file",
-                        action="create_on_execute",
-                        source=spark_deps_full_path,
-                        group=params.zeppelin_group,
-                        owner=params.zeppelin_user,
-                        mode=0444,
-                        replace_existing_files=True,
-                        )
-
-    params.HdfsResource(None, action="execute")
-
-  def create_zeppelin_log_dir(self, env):
-    import params
-    env.set_params(params)
-    Directory([params.zeppelin_log_dir],
-              owner=params.zeppelin_user,
-              group=params.zeppelin_group,
-              cd_access="a",
-              create_parents=True,
-              mode=0755
-              )
-
-  def create_zeppelin_hdfs_conf_dir(self, env):
-    import params
-    env.set_params(params)
-    Directory([params.external_dependency_conf],
-              owner=params.zeppelin_user,
-              group=params.zeppelin_group,
-              cd_access="a",
-              create_parents=True,
-              mode=0755
-              )
-
-  def chown_zeppelin_pid_dir(self, env):
-    import params
-    env.set_params(params)
-    Execute(("chown", "-R", format("{zeppelin_user}") + ":" + 
format("{zeppelin_group}"), params.zeppelin_pid_dir),
-            sudo=True)
-
-  def configure(self, env):
-    import params
-    import status_params
-    env.set_params(params)
-    env.set_params(status_params)
-    self.create_zeppelin_log_dir(env)
-
-    # create the pid and zeppelin dirs
-    Directory([params.zeppelin_pid_dir, params.zeppelin_dir],
-              owner=params.zeppelin_user,
-              group=params.zeppelin_group,
-              cd_access="a",
-              create_parents=True,
-              mode=0755
-    )
-    self.chown_zeppelin_pid_dir(env)
-
-    # write out zeppelin-site.xml
-    XmlConfig("zeppelin-site.xml",
-              conf_dir=params.conf_dir,
-              
configurations=params.config['configurations']['zeppelin-config'],
-              owner=params.zeppelin_user,
-              group=params.zeppelin_group
-              )
-    # write out zeppelin-env.sh
-    env_content = InlineTemplate(params.zeppelin_env_content)
-    File(format("{params.conf_dir}/zeppelin-env.sh"), content=env_content,
-         owner=params.zeppelin_user, group=params.zeppelin_group)
-
-    # write out shiro.ini
-    shiro_ini_content = InlineTemplate(params.shiro_ini_content)
-    File(format("{params.conf_dir}/shiro.ini"), content=shiro_ini_content,
-         owner=params.zeppelin_user, group=params.zeppelin_group)
-
-    # write out log4j.properties
-    File(format("{params.conf_dir}/log4j.properties"), 
content=params.log4j_properties_content,
-         owner=params.zeppelin_user, group=params.zeppelin_group)
-
-    self.create_zeppelin_hdfs_conf_dir(env)
-
-    if len(params.hbase_master_hosts) > 0 and params.is_hbase_installed:
-      # copy hbase-site.xml
-      XmlConfig("hbase-site.xml",
-              conf_dir=params.external_dependency_conf,
-              configurations=params.config['configurations']['hbase-site'],
-              
configuration_attributes=params.config['configuration_attributes']['hbase-site'],
-              owner=params.zeppelin_user,
-              group=params.zeppelin_group,
-              mode=0644)
-
-      XmlConfig("hdfs-site.xml",
-                conf_dir=params.external_dependency_conf,
-                configurations=params.config['configurations']['hdfs-site'],
-                
configuration_attributes=params.config['configuration_attributes']['hdfs-site'],
-                owner=params.zeppelin_user,
-                group=params.zeppelin_group,
-                mode=0644)
-
-      XmlConfig("core-site.xml",
-                conf_dir=params.external_dependency_conf,
-                configurations=params.config['configurations']['core-site'],
-                
configuration_attributes=params.config['configuration_attributes']['core-site'],
-                owner=params.zeppelin_user,
-                group=params.zeppelin_group,
-                mode=0644)
-
-  def check_and_copy_notebook_in_hdfs(self, params):
-    if 
params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir'].startswith("/"):
-      notebook_directory = 
params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir']
-    else:
-      notebook_directory = "/user/" + format("{zeppelin_user}") + "/" + \
-                           
params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir']
-
-    kinit_path_local = 
get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', 
None))
-    kinit_if_needed = format("{kinit_path_local} -kt 
{zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
-
-    notebook_directory_exists = shell.call(format("{kinit_if_needed} hdfs 
--config {hadoop_conf_dir} dfs -test -e {notebook_directory};echo $?"),
-                                           user=params.zeppelin_user)[1]
-
-    #if there is no kerberos setup then the string will contain "-bash: kinit: 
command not found"
-    if "\n" in notebook_directory_exists:
-      notebook_directory_exists = notebook_directory_exists.split("\n")[1]
-
-    # '1' means it does not exists
-    if notebook_directory_exists == '1':
-      # hdfs dfs -mkdir {notebook_directory}
-      params.HdfsResource(format("{notebook_directory}"),
-                          type="directory",
-                          action="create_on_execute",
-                          owner=params.zeppelin_user,
-                          recursive_chown=True,
-                          recursive_chmod=True
-                          )
-
-      # hdfs dfs -put /usr/hdp/current/zeppelin-server/notebook/ 
{notebook_directory}
-      params.HdfsResource(format("{notebook_directory}"),
-                            type="directory",
-                            action="create_on_execute",
-                            source=params.notebook_dir,
-                            owner=params.zeppelin_user,
-                            recursive_chown=True,
-                            recursive_chmod=True
-                            )
-
-
-  def stop(self, env, upgrade_type=None):
-    import params
-    self.create_zeppelin_log_dir(env)
-    self.chown_zeppelin_pid_dir(env)
-    Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh stop >> ' + 
params.zeppelin_log_file,
-            user=params.zeppelin_user)
-
-  def start(self, env, upgrade_type=None):
-    import params
-    import status_params
-    self.configure(env)
-
-    Execute(("chown", "-R", format("{zeppelin_user}") + ":" + 
format("{zeppelin_group}"), "/etc/zeppelin"),
-            sudo=True)
-    Execute(("chown", "-R", format("{zeppelin_user}") + ":" + 
format("{zeppelin_group}"),
-             os.path.join(params.zeppelin_dir, "notebook")), sudo=True)
-
-    if 'zeppelin.notebook.storage' in 
params.config['configurations']['zeppelin-config'] \
-        and 
params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] 
== 'org.apache.zeppelin.notebook.repo.HdfsNotebookRepo':
-      self.check_and_copy_notebook_in_hdfs(params)
-
-    if params.security_enabled:
-        zeppelin_kinit_cmd = format("{kinit_path_local} -kt 
{zeppelin_kerberos_keytab} {zeppelin_kerberos_principal}; ")
-        Execute(zeppelin_kinit_cmd, user=params.zeppelin_user)
-
-    zeppelin_spark_dependencies = self.get_zeppelin_spark_dependencies()
-    if zeppelin_spark_dependencies and 
os.path.exists(zeppelin_spark_dependencies[0]):
-      self.create_zeppelin_dir(params)
-
-    # if first_setup:
-    if not glob.glob(params.conf_dir + "/interpreter.json") and \
-      not os.path.exists(params.conf_dir + "/interpreter.json"):
-      self.create_interpreter_json()
-      self.update_zeppelin_interpreter()
-
-    if params.zeppelin_interpreter_config_upgrade == True:
-      self.reset_interpreter_settings()
-      self.update_zeppelin_interpreter()
-
-    Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh restart >> '
-            + params.zeppelin_log_file, user=params.zeppelin_user)
-    pidfile = glob.glob(os.path.join(status_params.zeppelin_pid_dir,
-                                     'zeppelin-' + params.zeppelin_user + 
'*.pid'))[0]
-    Logger.info(format("Pid file is: {pidfile}"))
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-
-    try:
-        pid_file = glob.glob(status_params.zeppelin_pid_dir + '/zeppelin-' +
-                             status_params.zeppelin_user + '*.pid')[0]
-    except IndexError:
-        pid_file = ''
-    check_process_status(pid_file)
-
-  def reset_interpreter_settings(self):
-    import json
-    import interpreter_json_template
-    interpreter_json_template = 
json.loads(interpreter_json_template.template)['interpreterSettings']
-    config_data = self.get_interpreter_settings()
-    interpreter_settings = config_data['interpreterSettings']
-
-    for setting_key in interpreter_json_template.keys():
-      if setting_key not in interpreter_settings:
-        interpreter_settings[setting_key] = interpreter_json_template[
-          setting_key]
-
-    self.set_interpreter_settings(config_data)
-
-  def get_interpreter_settings(self):
-    import params
-    import json
-
-    interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
-    config_content = sudo.read_file(interpreter_config)
-    config_data = json.loads(config_content)
-    return config_data
-
-  def pre_upgrade_restart(self, env, upgrade_type=None):
-    Logger.info("Executing Stack Upgrade pre-restart")
-    import params
-    env.set_params(params)
-
-    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, 
format_stack_version(params.version)):
-      stack_select.select_packages(params.version)
-
-  def set_interpreter_settings(self, config_data):
-    import params
-    import json
-
-    interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
-    File(interpreter_config,
-         group=params.zeppelin_group,
-         owner=params.zeppelin_user,
-         content=json.dumps(config_data, indent=2)
-         )
-
-  def update_kerberos_properties(self):
-    import params
-    config_data = self.get_interpreter_settings()
-    interpreter_settings = config_data['interpreterSettings']
-    for interpreter_setting in interpreter_settings:
-      interpreter = interpreter_settings[interpreter_setting]
-      if interpreter['group'] == 'livy' and params.livy_livyserver_host:
-        if params.zeppelin_kerberos_principal and 
params.zeppelin_kerberos_keytab and params.security_enabled:
-          interpreter['properties']['zeppelin.livy.principal'] = 
params.zeppelin_kerberos_principal
-          interpreter['properties']['zeppelin.livy.keytab'] = 
params.zeppelin_kerberos_keytab
-        else:
-          interpreter['properties']['zeppelin.livy.principal'] = ""
-          interpreter['properties']['zeppelin.livy.keytab'] = ""
-      elif interpreter['group'] == 'spark':
-        if params.zeppelin_kerberos_principal and 
params.zeppelin_kerberos_keytab and params.security_enabled:
-          interpreter['properties']['spark.yarn.principal'] = 
params.zeppelin_kerberos_principal
-          interpreter['properties']['spark.yarn.keytab'] = 
params.zeppelin_kerberos_keytab
-        else:
-          interpreter['properties']['spark.yarn.principal'] = ""
-          interpreter['properties']['spark.yarn.keytab'] = ""
-      elif interpreter['group'] == 'jdbc':
-        if params.zeppelin_kerberos_principal and 
params.zeppelin_kerberos_keytab and params.security_enabled:
-          interpreter['properties']['zeppelin.jdbc.auth.type'] = "KERBEROS"
-          interpreter['properties']['zeppelin.jdbc.principal'] = 
params.zeppelin_kerberos_principal
-          interpreter['properties']['zeppelin.jdbc.keytab.location'] = 
params.zeppelin_kerberos_keytab
-          if params.zookeeper_znode_parent \
-              and params.hbase_zookeeper_quorum \
-              and 'phoenix.url' in interpreter['properties'] \
-              and params.zookeeper_znode_parent not in 
interpreter['properties']['phoenix.url']:
-            interpreter['properties']['phoenix.url'] = "jdbc:phoenix:" + \
-                                                       
params.hbase_zookeeper_quorum + ':' + \
-                                                       
params.zookeeper_znode_parent
-        else:
-          interpreter['properties']['zeppelin.jdbc.auth.type'] = "SIMPLE"
-          interpreter['properties']['zeppelin.jdbc.principal'] = ""
-          interpreter['properties']['zeppelin.jdbc.keytab.location'] = ""
-      elif interpreter['group'] == 'sh':
-        if params.zeppelin_kerberos_principal and 
params.zeppelin_kerberos_keytab and params.security_enabled:
-          interpreter['properties']['zeppelin.shell.auth.type'] = "KERBEROS"
-          interpreter['properties']['zeppelin.shell.principal'] = 
params.zeppelin_kerberos_principal
-          interpreter['properties']['zeppelin.shell.keytab.location'] = 
params.zeppelin_kerberos_keytab
-        else:
-          interpreter['properties']['zeppelin.shell.auth.type'] = ""
-          interpreter['properties']['zeppelin.shell.principal'] = ""
-          interpreter['properties']['zeppelin.shell.keytab.location'] = ""
-
-    self.set_interpreter_settings(config_data)
-
-  def update_zeppelin_interpreter(self):
-    import params
-    config_data = self.get_interpreter_settings()
-    interpreter_settings = config_data['interpreterSettings']
-
-    if 'spark2-defaults' in params.config['configurations']:
-      spark2_config = self.get_spark2_interpreter_config()
-      config_id = spark2_config["id"]
-      interpreter_settings[config_id] = spark2_config
-
-    if params.livy2_livyserver_host:
-      livy2_config = self.get_livy2_interpreter_config()
-      config_id = livy2_config["id"]
-      interpreter_settings[config_id] = livy2_config
-
-    if params.zeppelin_interpreter:
-      settings_to_delete = []
-      for settings_key, interpreter in interpreter_settings.items():
-        if interpreter['group'] not in params.zeppelin_interpreter:
-          settings_to_delete.append(settings_key)
-
-      for key in settings_to_delete:
-        del interpreter_settings[key]
-
-    hive_interactive_properties_key = 'hive_interactive'
-    for setting_key in interpreter_settings.keys():
-      interpreter = interpreter_settings[setting_key]
-      if interpreter['group'] == 'jdbc':
-        interpreter['dependencies'] = []
-
-        if not params.hive_server_host and 
params.hive_server_interactive_hosts:
-          hive_interactive_properties_key = 'hive'
-
-        if params.hive_server_host:
-          interpreter['properties']['hive.driver'] = 
'org.apache.hive.jdbc.HiveDriver'
-          interpreter['properties']['hive.user'] = 'hive'
-          interpreter['properties']['hive.password'] = ''
-          interpreter['properties']['hive.proxy.user.property'] = 
'hive.server2.proxy.user'
-          if params.hive_server2_support_dynamic_service_discovery:
-            interpreter['properties']['hive.url'] = 'jdbc:hive2://' + \
-                                                 params.hive_zookeeper_quorum 
+ \
-                                                 '/;' + 
'serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=' + \
-                                                    
params.hive_zookeeper_namespace
-          else:
-            interpreter['properties']['hive.url'] = 'jdbc:hive2://' + \
-                                                 params.hive_server_host + \
-                                                     ':' + 
params.hive_server_port
-        if params.hive_server_interactive_hosts:
-          interpreter['properties'][hive_interactive_properties_key + 
'.driver'] = 'org.apache.hive.jdbc.HiveDriver'
-          interpreter['properties'][hive_interactive_properties_key + '.user'] 
= 'hive'
-          interpreter['properties'][hive_interactive_properties_key + 
'.password'] = ''
-          interpreter['properties'][hive_interactive_properties_key + 
'.proxy.user.property'] = 'hive.server2.proxy.user'
-          if params.hive_server2_support_dynamic_service_discovery:
-            interpreter['properties'][hive_interactive_properties_key + 
'.url'] = 'jdbc:hive2://' + \
-                                                    
params.hive_zookeeper_quorum + \
-                                                    '/;' + 
'serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=' + \
-                                                    
params.hive_interactive_zookeeper_namespace
-          else:
-            interpreter['properties'][hive_interactive_properties_key + 
'.url'] = 'jdbc:hive2://' + \
-                                                    
params.hive_server_interactive_hosts + \
-                                                    ':' + 
params.hive_server_port
-
-        if params.spark_thrift_server_hosts:
-          interpreter['properties']['spark.driver'] = 
'org.apache.hive.jdbc.HiveDriver'
-          interpreter['properties']['spark.user'] = 'hive'
-          interpreter['properties']['spark.password'] = ''
-          interpreter['properties']['spark.proxy.user.property'] = 
'hive.server2.proxy.user'
-          interpreter['properties']['spark.url'] = 'jdbc:hive2://' + \
-              params.spark_thrift_server_hosts + ':' + 
params.spark_hive_thrift_port + '/'
-          if params.spark_hive_principal:
-            interpreter['properties']['spark.url'] += ';principal=' + 
params.spark_hive_principal
-
-        if params.spark2_thrift_server_hosts:
-          interpreter['properties']['spark2.driver'] = 
'org.apache.hive.jdbc.HiveDriver'
-          interpreter['properties']['spark2.user'] = 'hive'
-          interpreter['properties']['spark2.password'] = ''
-          interpreter['properties']['spark2.proxy.user.property'] = 
'hive.server2.proxy.user'
-          interpreter['properties']['spark2.url'] = 'jdbc:hive2://' + \
-              params.spark2_thrift_server_hosts + ':' + 
params.spark2_hive_thrift_port + '/'
-          if params.spark_hive_principal:
-            interpreter['properties']['spark2.url'] += ';principal=' + 
params.spark2_hive_principal
-
-        if params.zookeeper_znode_parent \
-                and params.hbase_zookeeper_quorum:
-            interpreter['properties']['phoenix.driver'] = 
'org.apache.phoenix.jdbc.PhoenixDriver'
-            interpreter['properties']['phoenix.hbase.client.retries.number'] = 
'1'
-            interpreter['properties']['phoenix.user'] = 'phoenixuser'
-            interpreter['properties']['phoenix.password'] = ''
-            interpreter['properties']['phoenix.url'] = "jdbc:phoenix:" + \
-                                                    
params.hbase_zookeeper_quorum + ':' + \
-                                                    
params.zookeeper_znode_parent
-
-      elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy':
-        if params.livy_livyserver_host:
-          interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy_livyserver_host + \
-                                                           ":" + params.livy_livyserver_port
-        else:
-          del interpreter_settings[setting_key]
-
-      elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy2':
-        if params.livy2_livyserver_host:
-          interpreter['properties']['zeppelin.livy.url'] = "http://" + params.livy2_livyserver_host + \
-                                                           ":" + params.livy2_livyserver_port
-        else:
-          del interpreter_settings[setting_key]
-
-
-      elif interpreter['group'] == 'spark' and interpreter['name'] == 'spark':
-        if 'spark-env' in params.config['configurations']:
-          interpreter['properties']['master'] = "yarn-client"
-          interpreter['properties']['SPARK_HOME'] = 
"/usr/hdp/current/spark-client/"
-        else:
-          del interpreter_settings[setting_key]
-
-      elif interpreter['group'] == 'spark' and interpreter['name'] == 'spark2':
-        if 'spark2-env' in params.config['configurations']:
-          interpreter['properties']['master'] = "yarn-client"
-          interpreter['properties']['SPARK_HOME'] = 
"/usr/hdp/current/spark2-client/"
-        else:
-          del interpreter_settings[setting_key]
-
-    self.set_interpreter_settings(config_data)
-    self.update_kerberos_properties()
-
-  def create_interpreter_json(self):
-    import interpreter_json_template
-    import params
-
-    interpreter_json = interpreter_json_template.template
-    File(format("{params.conf_dir}/interpreter.json"), 
content=interpreter_json,
-         owner=params.zeppelin_user, group=params.zeppelin_group)
-
-  def get_zeppelin_spark_dependencies(self):
-    import params
-    return glob.glob(params.zeppelin_dir + 
'/interpreter/spark/dep/zeppelin-spark-dependencies*.jar')
-
-  def get_spark2_interpreter_config(self):
-    import spark2_config_template
-    import json
-
-    return json.loads(spark2_config_template.template)
-
-  def get_livy2_interpreter_config(self):
-    import livy2_config_template
-    import json
-
-    return json.loads(livy2_config_template.template)
-
-if __name__ == "__main__":
-  Master().execute()

Reply via email to