Repository: ambari
Updated Branches:
  refs/heads/trunk 8c104df93 -> 190094ba2


http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py
new file mode 100644
index 0000000..6a98919
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/interpreter_json_template.py
@@ -0,0 +1,361 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+template = '''
+{
+  "interpreterSettings": {
+    "2CKEKWY8Z": {
+      "id": "2CKEKWY8Z",
+      "name": "angular",
+      "group": "angular",
+      "properties": {},
+      "status": "READY",
+      "interpreterGroup": [
+        {
+          "name": "angular",
+          "class": "org.apache.zeppelin.angular.AngularInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "editOnDblClick": true
+          }
+        }
+      ],
+      "dependencies": [],
+      "option": {
+        "remote": true,
+        "port": -1,
+        "perNote": "shared",
+        "perUser": "shared",
+        "isExistingProcess": false,
+        "setPermission": false,
+        "users": [],
+        "isUserImpersonate": false
+      }
+    },
+    "2CKX8WPU1": {
+      "id": "2CKX8WPU1",
+      "name": "spark",
+      "group": "spark",
+      "properties": {
+        "spark.executor.memory": "512m",
+        "args": "",
+        "zeppelin.spark.printREPLOutput": "true",
+        "spark.cores.max": "",
+        "zeppelin.dep.additionalRemoteRepository": 
"spark-packages,http://dl.bintray.com/spark-packages/maven,false;";,
+        "zeppelin.spark.sql.stacktrace": "false",
+        "zeppelin.spark.importImplicit": "true",
+        "zeppelin.spark.concurrentSQL": "false",
+        "zeppelin.spark.useHiveContext": "true",
+        "zeppelin.pyspark.python": "python",
+        "zeppelin.dep.localrepo": "local-repo",
+        "zeppelin.R.knitr": "true",
+        "zeppelin.spark.maxResult": "1000",
+        "master": "yarn-client",
+        "spark.app.name": "Zeppelin",
+        "zeppelin.R.image.width": "100%",
+        "zeppelin.R.render.options": "out.format \u003d \u0027html\u0027, 
comment \u003d NA, echo \u003d FALSE, results \u003d \u0027asis\u0027, message 
\u003d F, warning \u003d F",
+        "zeppelin.R.cmd": "R"
+      },
+      "status": "READY",
+      "interpreterGroup": [
+        {
+          "name": "spark",
+          "class": "org.apache.zeppelin.spark.SparkInterpreter",
+          "defaultInterpreter": true,
+          "editor": {
+            "language": "scala"
+          }
+        },
+        {
+          "name": "sql",
+          "class": "org.apache.zeppelin.spark.SparkSqlInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "sql"
+          }
+        },
+        {
+          "name": "dep",
+          "class": "org.apache.zeppelin.spark.DepInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "scala"
+          }
+        },
+        {
+          "name": "pyspark",
+          "class": "org.apache.zeppelin.spark.PySparkInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "python"
+          }
+        },
+        {
+          "name": "r",
+          "class": "org.apache.zeppelin.spark.SparkRInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "r"
+          }
+        }
+      ],
+      "dependencies": [],
+      "option": {
+        "remote": true,
+        "port": -1,
+        "perNote": "shared",
+        "perUser": "shared",
+        "isExistingProcess": false,
+        "setPermission": false,
+        "users": [],
+        "isUserImpersonate": false
+      }
+    },
+    "2CK8A9MEG": {
+      "id": "2CK8A9MEG",
+      "name": "jdbc",
+      "group": "jdbc",
+      "properties": {
+        "default.password": "",
+        "zeppelin.jdbc.auth.type": "",
+        "common.max_count": "1000",
+        "zeppelin.jdbc.principal": "",
+        "default.user": "gpadmin",
+        "default.url": "jdbc:postgresql://localhost:5432/",
+        "default.driver": "org.postgresql.Driver",
+        "zeppelin.jdbc.keytab.location": "",
+        "zeppelin.jdbc.concurrent.use": "true",
+        "zeppelin.jdbc.concurrent.max_connection": "10"
+      },
+      "status": "READY",
+      "interpreterGroup": [
+        {
+          "name": "sql",
+          "class": "org.apache.zeppelin.jdbc.JDBCInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "sql",
+            "editOnDblClick": false
+          }
+        }
+      ],
+      "dependencies": [],
+      "option": {
+        "remote": true,
+        "port": -1,
+        "perNote": "shared",
+        "perUser": "shared",
+        "isExistingProcess": false,
+        "setPermission": false,
+        "users": [],
+        "isUserImpersonate": false
+      }
+    },
+    "2CKX6DGQZ": {
+      "id": "2CKX6DGQZ",
+      "name": "livy",
+      "group": "livy",
+      "properties": {
+        "zeppelin.livy.pull_status.interval.millis": "1000",
+        "livy.spark.executor.memory": "",
+        "zeppelin.livy.session.create_timeout": "120",
+        "zeppelin.livy.principal": "",
+        "zeppelin.livy.spark.sql.maxResult": "1000",
+        "zeppelin.livy.keytab": "",
+        "zeppelin.livy.concurrentSQL": "false",
+        "zeppelin.livy.spark.sql.field.truncate": "true",
+        "livy.spark.executor.cores": "",
+        "zeppelin.livy.displayAppInfo": "false",
+        "zeppelin.livy.url": "http://localhost:8998";,
+        "livy.spark.dynamicAllocation.minExecutors": "",
+        "livy.spark.driver.cores": "",
+        "livy.spark.jars.packages": "",
+        "livy.spark.dynamicAllocation.enabled": "",
+        "livy.spark.executor.instances": "",
+        "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "",
+        "livy.spark.dynamicAllocation.maxExecutors": "",
+        "livy.spark.dynamicAllocation.initialExecutors": "",
+        "livy.spark.driver.memory": ""
+      },
+      "status": "READY",
+      "interpreterGroup": [
+        {
+          "name": "spark",
+          "class": "org.apache.zeppelin.livy.LivySparkInterpreter",
+          "defaultInterpreter": true,
+          "editor": {
+            "language": "scala",
+            "editOnDblClick": false
+          }
+        },
+        {
+          "name": "sql",
+          "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "sql",
+            "editOnDblClick": false
+          }
+        },
+        {
+          "name": "pyspark",
+          "class": "org.apache.zeppelin.livy.LivyPySparkInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "python",
+            "editOnDblClick": false
+          }
+        },
+        {
+          "name": "pyspark3",
+          "class": "org.apache.zeppelin.livy.LivyPySpark3Interpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "python",
+            "editOnDblClick": false
+          }
+        },
+        {
+          "name": "sparkr",
+          "class": "org.apache.zeppelin.livy.LivySparkRInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "r",
+            "editOnDblClick": false
+          }
+        }
+      ],
+      "dependencies": [],
+      "option": {
+        "remote": true,
+        "port": -1,
+        "perNote": "shared",
+        "perUser": "scoped",
+        "isExistingProcess": false,
+        "setPermission": false,
+        "users": [],
+        "isUserImpersonate": false
+      }
+    },
+    "2CKAY1A8Y": {
+      "id": "2CKAY1A8Y",
+      "name": "md",
+      "group": "md",
+      "properties": {
+        "markdown.parser.type": "pegdown"
+      },
+      "status": "READY",
+      "interpreterGroup": [
+        {
+          "name": "md",
+          "class": "org.apache.zeppelin.markdown.Markdown",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "markdown",
+            "editOnDblClick": true
+          }
+        }
+      ],
+      "dependencies": [],
+      "option": {
+        "remote": true,
+        "port": -1,
+        "perNote": "shared",
+        "perUser": "shared",
+        "isExistingProcess": false,
+        "setPermission": false,
+        "users": [],
+        "isUserImpersonate": false
+      }
+    },
+    "2CHS8UYQQ": {
+      "id": "2CHS8UYQQ",
+      "name": "sh",
+      "group": "sh",
+      "properties": {
+        "zeppelin.shell.keytab.location": "",
+        "shell.command.timeout.millisecs": "60000",
+        "zeppelin.shell.principal": "",
+        "zeppelin.shell.auth.type": ""
+      },
+      "status": "READY",
+      "interpreterGroup": [
+        {
+          "name": "sh",
+          "class": "org.apache.zeppelin.shell.ShellInterpreter",
+          "defaultInterpreter": false,
+          "editor": {
+            "language": "sh",
+            "editOnDblClick": false
+          }
+        }
+      ],
+      "dependencies": [],
+      "option": {
+        "remote": true,
+        "port": -1,
+        "perNote": "shared",
+        "perUser": "shared",
+        "isExistingProcess": false,
+        "setPermission": false,
+        "users": [],
+        "isUserImpersonate": false
+      }
+    }
+  },
+  "interpreterBindings": {},
+  "interpreterRepositories": [
+    {
+      "id": "central",
+      "type": "default",
+      "url": "http://repo1.maven.org/maven2/";,
+      "releasePolicy": {
+        "enabled": true,
+        "updatePolicy": "daily",
+        "checksumPolicy": "warn"
+      },
+      "snapshotPolicy": {
+        "enabled": true,
+        "updatePolicy": "daily",
+        "checksumPolicy": "warn"
+      },
+      "mirroredRepositories": [],
+      "repositoryManager": false
+    },
+    {
+      "id": "local",
+      "type": "default",
+      "url": "file:///home/zeppelin/.m2/repository",
+      "releasePolicy": {
+        "enabled": true,
+        "updatePolicy": "daily",
+        "checksumPolicy": "warn"
+      },
+      "snapshotPolicy": {
+        "enabled": true,
+        "updatePolicy": "daily",
+        "checksumPolicy": "warn"
+      },
+      "mirroredRepositories": [],
+      "repositoryManager": false
+    }
+  ]
+}
+'''
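
Note: master.py (below) consumes this module with json.loads, so the template must stay valid JSON. A minimal sanity-check sketch, assuming only this new module on the path:

    import json
    import interpreter_json_template

    settings = json.loads(interpreter_json_template.template)['interpreterSettings']
    for setting_id, setting in settings.items():
        # each entry carries the fields master.py reads below
        assert setting['id'] == setting_id
        assert 'group' in setting and 'option' in setting
    print(sorted(s['name'] for s in settings.values()))
    # ['angular', 'jdbc', 'livy', 'md', 'sh', 'spark']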

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/livy2_config_template.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/livy2_config_template.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/livy2_config_template.py
new file mode 100644
index 0000000..71d3817
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/livy2_config_template.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+template = '''
+{
+  "id": "2C8A4SZ9T_livy2",
+  "status": "READY",
+  "group": "livy",
+  "name": "livy2",
+  "properties": {
+    "zeppelin.livy.keytab": "",
+    "zeppelin.livy.spark.sql.maxResult": "1000",
+    "livy.spark.executor.instances": "",
+    "livy.spark.executor.memory": "",
+    "livy.spark.dynamicAllocation.enabled": "",
+    "livy.spark.dynamicAllocation.cachedExecutorIdleTimeout": "",
+    "livy.spark.dynamicAllocation.initialExecutors": "",
+    "zeppelin.livy.session.create_timeout": "120",
+    "livy.spark.driver.memory": "",
+    "zeppelin.livy.displayAppInfo": "false",
+    "livy.spark.jars.packages": "",
+    "livy.spark.dynamicAllocation.maxExecutors": "",
+    "zeppelin.livy.concurrentSQL": "false",
+    "zeppelin.livy.principal": "",
+    "livy.spark.executor.cores": "",
+    "zeppelin.livy.url": "http://localhost:8998";,
+    "zeppelin.livy.pull_status.interval.millis": "1000",
+    "livy.spark.driver.cores": "",
+    "livy.spark.dynamicAllocation.minExecutors": ""
+  },
+  "interpreterGroup": [
+    {
+      "class": "org.apache.zeppelin.livy.LivySparkInterpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "scala"
+      },
+      "name": "spark",
+      "defaultInterpreter": false
+    },
+    {
+      "class": "org.apache.zeppelin.livy.LivySparkSQLInterpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "sql"
+      },
+      "name": "sql",
+      "defaultInterpreter": false
+    },
+    {
+      "class": "org.apache.zeppelin.livy.LivyPySparkInterpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "python"
+      },
+      "name": "pyspark",
+      "defaultInterpreter": false
+    },
+    {
+      "class": "org.apache.zeppelin.livy.LivyPySpark3Interpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "python"
+      },
+      "name": "pyspark3",
+      "defaultInterpreter": false
+    },
+    {
+      "class": "org.apache.zeppelin.livy.LivySparkRInterpreter",
+      "editor": {
+        "editOnDblClick": false,
+        "language": "r"
+      },
+      "name": "sparkr",
+      "defaultInterpreter": false
+    }
+  ],
+  "dependencies": [],
+  "option": {
+    "setPermission": false,
+    "remote": true,
+    "users": [],
+    "isExistingProcess": false,
+    "perUser": "scoped",
+    "isUserImpersonate": false,
+    "perNote": "shared",
+    "port": -1
+  }
+}
+'''
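
Note: unlike the full interpreter.json template above, this module holds a single interpreter entry; master.py keys it into the settings map by its "id" field. A minimal sketch of that merge, assuming only this new module:

    import json
    import livy2_config_template

    config_data = {'interpreterSettings': {}}  # stands in for the parsed interpreter.json
    livy2_config = json.loads(livy2_config_template.template)
    config_data['interpreterSettings'][livy2_config['id']] = livy2_config
    print(list(config_data['interpreterSettings']))  # ['2C8A4SZ9T_livy2']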

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
new file mode 100644
index 0000000..ba73d10
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
@@ -0,0 +1,522 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import glob
+import os
+
+from resource_management.core import shell, sudo
+from resource_management.core.logger import Logger
+from resource_management.core.resources import Directory
+from resource_management.core.resources.system import Execute, File
+from resource_management.core.source import InlineTemplate
+from resource_management.libraries import XmlConfig
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.check_process_status import check_process_status
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.script.script import Script
+
+
+class Master(Script):
+  def install(self, env):
+    import params
+    env.set_params(params)
+    self.install_packages(env)
+
+    self.create_zeppelin_log_dir(env)
+
+    if params.spark_version:
+      Execute('echo spark_version:' + str(params.spark_version) + ' detected for spark_home: '
+              + params.spark_home + ' >> ' + params.zeppelin_log_file, user=params.zeppelin_user)
+    if params.spark2_version:
+      Execute('echo spark2_version:' + str(params.spark2_version) + ' detected for spark2_home: '
+              + params.spark2_home + ' >> ' + params.zeppelin_log_file, user=params.zeppelin_user)
+
+  def create_zeppelin_dir(self, params):
+    params.HdfsResource(format("/user/{zeppelin_user}"),
+                        type="directory",
+                        action="create_on_execute",
+                        owner=params.zeppelin_user,
+                        recursive_chown=True,
+                        recursive_chmod=True
+                        )
+    params.HdfsResource(format("/user/{zeppelin_user}/test"),
+                        type="directory",
+                        action="create_on_execute",
+                        owner=params.zeppelin_user,
+                        recursive_chown=True,
+                        recursive_chmod=True
+                        )
+    params.HdfsResource(format("/apps/zeppelin"),
+                        type="directory",
+                        action="create_on_execute",
+                        owner=params.zeppelin_user,
+                        recursive_chown=True,
+                        recursive_chmod=True
+                        )
+
+    spark_deps_full_path = self.get_zeppelin_spark_dependencies()[0]
+    spark_dep_file_name = os.path.basename(spark_deps_full_path)
+
+    params.HdfsResource(params.spark_jar_dir + "/" + spark_dep_file_name,
+                        type="file",
+                        action="create_on_execute",
+                        source=spark_deps_full_path,
+                        group=params.zeppelin_group,
+                        owner=params.zeppelin_user,
+                        mode=0444,
+                        replace_existing_files=True,
+                        )
+
+    params.HdfsResource(None, action="execute")
+
+  def create_zeppelin_log_dir(self, env):
+    import params
+    env.set_params(params)
+    Directory([params.zeppelin_log_dir],
+              owner=params.zeppelin_user,
+              group=params.zeppelin_group,
+              cd_access="a",
+              create_parents=True,
+              mode=0755
+              )
+
+  def create_zeppelin_hdfs_conf_dir(self, env):
+    import params
+    env.set_params(params)
+    Directory([params.external_dependency_conf],
+              owner=params.zeppelin_user,
+              group=params.zeppelin_group,
+              cd_access="a",
+              create_parents=True,
+              mode=0755
+              )
+
+  def chown_zeppelin_pid_dir(self, env):
+    import params
+    env.set_params(params)
+    Execute(("chown", "-R", format("{zeppelin_user}") + ":" + 
format("{zeppelin_group}"), params.zeppelin_pid_dir),
+            sudo=True)
+
+  def configure(self, env):
+    import params
+    import status_params
+    env.set_params(params)
+    env.set_params(status_params)
+    self.create_zeppelin_log_dir(env)
+
+    # create the pid and zeppelin dirs
+    Directory([params.zeppelin_pid_dir, params.zeppelin_dir],
+              owner=params.zeppelin_user,
+              group=params.zeppelin_group,
+              cd_access="a",
+              create_parents=True,
+              mode=0755
+    )
+    self.chown_zeppelin_pid_dir(env)
+
+    # write out zeppelin-site.xml
+    XmlConfig("zeppelin-site.xml",
+              conf_dir=params.conf_dir,
+              configurations=params.config['configurations']['zeppelin-config'],
+              owner=params.zeppelin_user,
+              group=params.zeppelin_group
+              )
+    # write out zeppelin-env.sh
+    env_content = InlineTemplate(params.zeppelin_env_content)
+    File(format("{params.conf_dir}/zeppelin-env.sh"), content=env_content,
+         owner=params.zeppelin_user, group=params.zeppelin_group)
+
+    # write out shiro.ini
+    shiro_ini_content = InlineTemplate(params.shiro_ini_content)
+    File(format("{params.conf_dir}/shiro.ini"), content=shiro_ini_content,
+         owner=params.zeppelin_user, group=params.zeppelin_group)
+
+    # write out log4j.properties
+    File(format("{params.conf_dir}/log4j.properties"), 
content=params.log4j_properties_content,
+         owner=params.zeppelin_user, group=params.zeppelin_group)
+
+    self.create_zeppelin_hdfs_conf_dir(env)
+
+    if len(params.hbase_master_hosts) > 0 and params.is_hbase_installed:
+      # copy hbase-site.xml
+      XmlConfig("hbase-site.xml",
+              conf_dir=params.external_dependency_conf,
+              configurations=params.config['configurations']['hbase-site'],
+              configuration_attributes=params.config['configuration_attributes']['hbase-site'],
+              owner=params.zeppelin_user,
+              group=params.zeppelin_group,
+              mode=0644)
+
+      XmlConfig("hdfs-site.xml",
+                conf_dir=params.external_dependency_conf,
+                configurations=params.config['configurations']['hdfs-site'],
+                configuration_attributes=params.config['configuration_attributes']['hdfs-site'],
+                owner=params.zeppelin_user,
+                group=params.zeppelin_group,
+                mode=0644)
+
+      XmlConfig("core-site.xml",
+                conf_dir=params.external_dependency_conf,
+                configurations=params.config['configurations']['core-site'],
+                configuration_attributes=params.config['configuration_attributes']['core-site'],
+                owner=params.zeppelin_user,
+                group=params.zeppelin_group,
+                mode=0644)
+
+  def check_and_copy_notebook_in_hdfs(self, params):
+    if params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir'].startswith("/"):
+      notebook_directory = params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir']
+    else:
+      notebook_directory = "/user/" + format("{zeppelin_user}") + "/" + \
+                           params.config['configurations']['zeppelin-config']['zeppelin.notebook.dir']
+
+    kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+    kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
+
+    notebook_directory_exists = shell.call(format("{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -test -e {notebook_directory};echo $?"),
+                                           user=params.zeppelin_user)[1]
+
+    # if there is no kerberos setup then the string will contain "-bash: kinit: command not found"
+    if "\n" in notebook_directory_exists:
+      notebook_directory_exists = notebook_directory_exists.split("\n")[1]
+
+    # '1' means it does not exist
+    if notebook_directory_exists == '1':
+      # hdfs dfs -mkdir {notebook_directory}
+      params.HdfsResource(format("{notebook_directory}"),
+                          type="directory",
+                          action="create_on_execute",
+                          owner=params.zeppelin_user,
+                          recursive_chown=True,
+                          recursive_chmod=True
+                          )
+
+      # hdfs dfs -put /usr/hdp/current/zeppelin-server/notebook/ {notebook_directory}
+      params.HdfsResource(format("{notebook_directory}"),
+                            type="directory",
+                            action="create_on_execute",
+                            source=params.notebook_dir,
+                            owner=params.zeppelin_user,
+                            recursive_chown=True,
+                            recursive_chmod=True
+                            )
+
+
+  def stop(self, env, upgrade_type=None):
+    import params
+    self.create_zeppelin_log_dir(env)
+    self.chown_zeppelin_pid_dir(env)
+    Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh stop >> ' + params.zeppelin_log_file,
+            user=params.zeppelin_user)
+
+  def start(self, env, upgrade_type=None):
+    import params
+    import status_params
+    self.configure(env)
+
+    Execute(("chown", "-R", format("{zeppelin_user}") + ":" + 
format("{zeppelin_group}"), "/etc/zeppelin"),
+            sudo=True)
+    Execute(("chown", "-R", format("{zeppelin_user}") + ":" + 
format("{zeppelin_group}"),
+             os.path.join(params.zeppelin_dir, "notebook")), sudo=True)
+
+    if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \
+        and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.HdfsNotebookRepo':
+      self.check_and_copy_notebook_in_hdfs(params)
+
+    if params.security_enabled:
+        zeppelin_kinit_cmd = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal}; ")
+        Execute(zeppelin_kinit_cmd, user=params.zeppelin_user)
+
+    zeppelin_spark_dependencies = self.get_zeppelin_spark_dependencies()
+    if zeppelin_spark_dependencies and os.path.exists(zeppelin_spark_dependencies[0]):
+      self.create_zeppelin_dir(params)
+
+    # if first_setup:
+    if not glob.glob(params.conf_dir + "/interpreter.json") and \
+      not os.path.exists(params.conf_dir + "/interpreter.json"):
+      self.create_interpreter_json()
+      self.update_zeppelin_interpreter()
+
+    if params.zeppelin_interpreter_config_upgrade == True:
+      self.reset_interpreter_settings()
+      self.update_zeppelin_interpreter()
+
+    Execute(params.zeppelin_dir + '/bin/zeppelin-daemon.sh restart >> '
+            + params.zeppelin_log_file, user=params.zeppelin_user)
+    pidfile = glob.glob(os.path.join(status_params.zeppelin_pid_dir,
+                                     'zeppelin-' + params.zeppelin_user + '*.pid'))[0]
+    Logger.info(format("Pid file is: {pidfile}"))
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+
+    try:
+        pid_file = glob.glob(status_params.zeppelin_pid_dir + '/zeppelin-' +
+                             status_params.zeppelin_user + '*.pid')[0]
+    except IndexError:
+        pid_file = ''
+    check_process_status(pid_file)
+
+  def reset_interpreter_settings(self):
+    import json
+    import interpreter_json_template
+    interpreter_json_template = json.loads(interpreter_json_template.template)['interpreterSettings']
+    config_data = self.get_interpreter_settings()
+    interpreter_settings = config_data['interpreterSettings']
+
+    for setting_key in interpreter_json_template.keys():
+      if setting_key not in interpreter_settings:
+        interpreter_settings[setting_key] = interpreter_json_template[
+          setting_key]
+
+    self.set_interpreter_settings(config_data)
+
+  def get_interpreter_settings(self):
+    import params
+    import json
+
+    interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
+    config_content = sudo.read_file(interpreter_config)
+    config_data = json.loads(config_content)
+    return config_data
+
+  def pre_upgrade_restart(self, env, upgrade_type=None):
+    Logger.info("Executing Stack Upgrade pre-restart")
+    import params
+    env.set_params(params)
+
+    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, format_stack_version(params.version)):
+      stack_select.select_packages(params.version)
+
+  def set_interpreter_settings(self, config_data):
+    import params
+    import json
+
+    interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
+    File(interpreter_config,
+         group=params.zeppelin_group,
+         owner=params.zeppelin_user,
+         content=json.dumps(config_data, indent=2)
+         )
+
+  def update_kerberos_properties(self):
+    import params
+    config_data = self.get_interpreter_settings()
+    interpreter_settings = config_data['interpreterSettings']
+    for interpreter_setting in interpreter_settings:
+      interpreter = interpreter_settings[interpreter_setting]
+      if interpreter['group'] == 'livy' and params.livy_livyserver_host:
+        if params.zeppelin_kerberos_principal and params.zeppelin_kerberos_keytab and params.security_enabled:
+          interpreter['properties']['zeppelin.livy.principal'] = params.zeppelin_kerberos_principal
+          interpreter['properties']['zeppelin.livy.keytab'] = params.zeppelin_kerberos_keytab
+        else:
+          interpreter['properties']['zeppelin.livy.principal'] = ""
+          interpreter['properties']['zeppelin.livy.keytab'] = ""
+      elif interpreter['group'] == 'spark':
+        if params.zeppelin_kerberos_principal and params.zeppelin_kerberos_keytab and params.security_enabled:
+          interpreter['properties']['spark.yarn.principal'] = params.zeppelin_kerberos_principal
+          interpreter['properties']['spark.yarn.keytab'] = params.zeppelin_kerberos_keytab
+        else:
+          interpreter['properties']['spark.yarn.principal'] = ""
+          interpreter['properties']['spark.yarn.keytab'] = ""
+      elif interpreter['group'] == 'jdbc':
+        if params.zeppelin_kerberos_principal and params.zeppelin_kerberos_keytab and params.security_enabled:
+          interpreter['properties']['zeppelin.jdbc.auth.type'] = "KERBEROS"
+          interpreter['properties']['zeppelin.jdbc.principal'] = params.zeppelin_kerberos_principal
+          interpreter['properties']['zeppelin.jdbc.keytab.location'] = params.zeppelin_kerberos_keytab
+          if params.zookeeper_znode_parent \
+              and params.hbase_zookeeper_quorum \
+              and 'phoenix.url' in interpreter['properties'] \
+              and params.zookeeper_znode_parent not in interpreter['properties']['phoenix.url']:
+            interpreter['properties']['phoenix.url'] = "jdbc:phoenix:" + \
+                                                       params.hbase_zookeeper_quorum + ':' + \
+                                                       params.zookeeper_znode_parent
+        else:
+          interpreter['properties']['zeppelin.jdbc.auth.type'] = "SIMPLE"
+          interpreter['properties']['zeppelin.jdbc.principal'] = ""
+          interpreter['properties']['zeppelin.jdbc.keytab.location'] = ""
+      elif interpreter['group'] == 'sh':
+        if params.zeppelin_kerberos_principal and params.zeppelin_kerberos_keytab and params.security_enabled:
+          interpreter['properties']['zeppelin.shell.auth.type'] = "KERBEROS"
+          interpreter['properties']['zeppelin.shell.principal'] = params.zeppelin_kerberos_principal
+          interpreter['properties']['zeppelin.shell.keytab.location'] = params.zeppelin_kerberos_keytab
+        else:
+          interpreter['properties']['zeppelin.shell.auth.type'] = ""
+          interpreter['properties']['zeppelin.shell.principal'] = ""
+          interpreter['properties']['zeppelin.shell.keytab.location'] = ""
+
+    self.set_interpreter_settings(config_data)
+
+  def update_zeppelin_interpreter(self):
+    import params
+    config_data = self.get_interpreter_settings()
+    interpreter_settings = config_data['interpreterSettings']
+
+    if 'spark2-defaults' in params.config['configurations']:
+      spark2_config = self.get_spark2_interpreter_config()
+      config_id = spark2_config["id"]
+      interpreter_settings[config_id] = spark2_config
+
+    if params.livy2_livyserver_host:
+      livy2_config = self.get_livy2_interpreter_config()
+      config_id = livy2_config["id"]
+      interpreter_settings[config_id] = livy2_config
+
+    if params.zeppelin_interpreter:
+      settings_to_delete = []
+      for settings_key, interpreter in interpreter_settings.items():
+        if interpreter['group'] not in params.zeppelin_interpreter:
+          settings_to_delete.append(settings_key)
+
+      for key in settings_to_delete:
+        del interpreter_settings[key]
+
+    hive_interactive_properties_key = 'hive_interactive'
+    for setting_key in interpreter_settings.keys():
+      interpreter = interpreter_settings[setting_key]
+      if interpreter['group'] == 'jdbc':
+        interpreter['dependencies'] = []
+
+        if not params.hive_server_host and params.hive_server_interactive_hosts:
+          hive_interactive_properties_key = 'hive'
+
+        if params.hive_server_host:
+          interpreter['properties']['hive.driver'] = 'org.apache.hive.jdbc.HiveDriver'
+          interpreter['properties']['hive.user'] = 'hive'
+          interpreter['properties']['hive.password'] = ''
+          interpreter['properties']['hive.proxy.user.property'] = 'hive.server2.proxy.user'
+          if params.hive_server2_support_dynamic_service_discovery:
+            interpreter['properties']['hive.url'] = 'jdbc:hive2://' + \
+                                                    params.hive_zookeeper_quorum + \
+                                                    '/;' + 'serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=' + \
+                                                    params.hive_zookeeper_namespace
+          else:
+            interpreter['properties']['hive.url'] = 'jdbc:hive2://' + \
+                                                    params.hive_server_host + \
+                                                    ':' + params.hive_server_port
+        if params.hive_server_interactive_hosts:
+          interpreter['properties'][hive_interactive_properties_key + '.driver'] = 'org.apache.hive.jdbc.HiveDriver'
+          interpreter['properties'][hive_interactive_properties_key + '.user'] = 'hive'
+          interpreter['properties'][hive_interactive_properties_key + '.password'] = ''
+          interpreter['properties'][hive_interactive_properties_key + '.proxy.user.property'] = 'hive.server2.proxy.user'
+          if params.hive_server2_support_dynamic_service_discovery:
+            interpreter['properties'][hive_interactive_properties_key + '.url'] = 'jdbc:hive2://' + \
+                                                    params.hive_zookeeper_quorum + \
+                                                    '/;' + 'serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=' + \
+                                                    params.hive_interactive_zookeeper_namespace
+          else:
+            interpreter['properties'][hive_interactive_properties_key + '.url'] = 'jdbc:hive2://' + \
+                                                    params.hive_server_interactive_hosts + \
+                                                    ':' + params.hive_server_port
+
+        if params.spark_thrift_server_hosts:
+          interpreter['properties']['spark.driver'] = 'org.apache.hive.jdbc.HiveDriver'
+          interpreter['properties']['spark.user'] = 'hive'
+          interpreter['properties']['spark.password'] = ''
+          interpreter['properties']['spark.proxy.user.property'] = 'hive.server2.proxy.user'
+          interpreter['properties']['spark.url'] = 'jdbc:hive2://' + \
+              params.spark_thrift_server_hosts + ':' + params.spark_hive_thrift_port + '/'
+          if params.spark_hive_principal:
+            interpreter['properties']['spark.url'] += ';principal=' + params.spark_hive_principal
+
+        if params.spark2_thrift_server_hosts:
+          interpreter['properties']['spark2.driver'] = 'org.apache.hive.jdbc.HiveDriver'
+          interpreter['properties']['spark2.user'] = 'hive'
+          interpreter['properties']['spark2.password'] = ''
+          interpreter['properties']['spark2.proxy.user.property'] = 'hive.server2.proxy.user'
+          interpreter['properties']['spark2.url'] = 'jdbc:hive2://' + \
+              params.spark2_thrift_server_hosts + ':' + params.spark2_hive_thrift_port + '/'
+          if params.spark2_hive_principal:
+            interpreter['properties']['spark2.url'] += ';principal=' + params.spark2_hive_principal
+
+        if params.zookeeper_znode_parent \
+                and params.hbase_zookeeper_quorum:
+            interpreter['properties']['phoenix.driver'] = 'org.apache.phoenix.jdbc.PhoenixDriver'
+            interpreter['properties']['phoenix.hbase.client.retries.number'] = '1'
+            interpreter['properties']['phoenix.user'] = 'phoenixuser'
+            interpreter['properties']['phoenix.password'] = ''
+            interpreter['properties']['phoenix.url'] = "jdbc:phoenix:" + \
+                                                       params.hbase_zookeeper_quorum + ':' + \
+                                                       params.zookeeper_znode_parent
+
+      elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy':
+        if params.livy_livyserver_host:
+          interpreter['properties']['zeppelin.livy.url'] = "http://"; + 
params.livy_livyserver_host + \
+                                                           ":" + 
params.livy_livyserver_port
+        else:
+          del interpreter_settings[setting_key]
+
+      elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy2':
+        if params.livy2_livyserver_host:
+          interpreter['properties']['zeppelin.livy.url'] = "http://"; + 
params.livy2_livyserver_host + \
+                                                           ":" + 
params.livy2_livyserver_port
+        else:
+          del interpreter_settings[setting_key]
+
+
+      elif interpreter['group'] == 'spark' and interpreter['name'] == 'spark':
+        if 'spark-env' in params.config['configurations']:
+          interpreter['properties']['master'] = "yarn-client"
+          interpreter['properties']['SPARK_HOME'] = "/usr/hdp/current/spark-client/"
+        else:
+          del interpreter_settings[setting_key]
+
+      elif interpreter['group'] == 'spark' and interpreter['name'] == 'spark2':
+        if 'spark2-env' in params.config['configurations']:
+          interpreter['properties']['master'] = "yarn-client"
+          interpreter['properties']['SPARK_HOME'] = "/usr/hdp/current/spark2-client/"
+        else:
+          del interpreter_settings[setting_key]
+
+    self.set_interpreter_settings(config_data)
+    self.update_kerberos_properties()
+
+  def create_interpreter_json(self):
+    import interpreter_json_template
+    import params
+
+    interpreter_json = interpreter_json_template.template
+    File(format("{params.conf_dir}/interpreter.json"), 
content=interpreter_json,
+         owner=params.zeppelin_user, group=params.zeppelin_group)
+
+  def get_zeppelin_spark_dependencies(self):
+    import params
+    return glob.glob(params.zeppelin_dir + '/interpreter/spark/dep/zeppelin-spark-dependencies*.jar')
+
+  def get_spark2_interpreter_config(self):
+    import spark2_config_template
+    import json
+
+    return json.loads(spark2_config_template.template)
+
+  def get_livy2_interpreter_config(self):
+    import livy2_config_template
+    import json
+
+    return json.loads(livy2_config_template.template)
+
+if __name__ == "__main__":
+  Master().execute()
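
Note: update_kerberos_properties above rewrites properties per interpreter group. A toy, self-contained sketch of the livy branch, with stand-in values (the principal and keytab below are illustrative assumptions, not cluster values):

    security_enabled = True
    principal = 'zeppelin@EXAMPLE.COM'  # illustrative
    keytab = '/etc/security/keytabs/zeppelin.server.kerberos.keytab'  # illustrative

    settings = {'2CKX6DGQZ': {'group': 'livy', 'name': 'livy', 'properties': {}}}
    for interpreter in settings.values():
        if interpreter['group'] == 'livy' and security_enabled:
            interpreter['properties']['zeppelin.livy.principal'] = principal
            interpreter['properties']['zeppelin.livy.keytab'] = keytab
        elif interpreter['group'] == 'livy':
            interpreter['properties']['zeppelin.livy.principal'] = ''
            interpreter['properties']['zeppelin.livy.keytab'] = ''
    print(settings['2CKX6DGQZ']['properties'])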

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
new file mode 100644
index 0000000..3242f26
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import functools
+import os
+import re
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.get_stack_version import get_stack_version
+from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions.version import format_stack_version, get_major_version
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+from resource_management.libraries.script.script import Script
+
+def get_port_from_url(address):
+  if address is not None:
+    return address.split(':')[-1]
+  else:
+    return address
+
+def extract_spark_version(spark_home):
+  try:
+    with open(spark_home + "/RELEASE") as fline:
+      return re.search(r'Spark (\d\.\d).+', fline.readline().rstrip()).group(1)
+  except Exception:
+    pass
+  return None
+
+
+# server configurations
+config = Script.get_config()
+stack_root = Script.get_stack_root()
+
+# e.g. /var/lib/ambari-agent/cache/stacks/HDP/2.2/services/zeppelin-stack/package
+service_packagedir = os.path.realpath(__file__).split('/scripts')[0]
+
+zeppelin_dirname = 'zeppelin-server'
+
+install_dir = os.path.join(stack_root, "current")
+executor_mem = config['configurations']['zeppelin-env']['zeppelin.executor.mem']
+executor_instances = config['configurations']['zeppelin-env'][
+  'zeppelin.executor.instances']
+
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+
+spark_jar_dir = config['configurations']['zeppelin-env']['zeppelin.spark.jar.dir']
+spark_jar = format("{spark_jar_dir}/zeppelin-spark-0.5.5-SNAPSHOT.jar")
+setup_view = True
+temp_file = config['configurations']['zeppelin-env']['zeppelin.temp.file']
+
+spark_home = ""
+spark_version = None
+spark2_home = ""
+spark2_version = None
+if 'spark-defaults' in config['configurations']:
+  spark_home = os.path.join(stack_root, "current", 'spark-client')
+  spark_version = extract_spark_version(spark_home)
+if 'spark2-defaults' in config['configurations']:
+  spark2_home = os.path.join(stack_root, "current", 'spark2-client')
+  spark2_version = extract_spark_version(spark2_home)
+
+# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
+version = default("/commandParams/version", None)
+stack_name = default("/hostLevelParams/stack_name", None)
+
+# params from zeppelin-config
+zeppelin_port = str(config['configurations']['zeppelin-config']['zeppelin.server.port'])
+zeppelin_interpreter = None
+if 'zeppelin.interpreter.group.order' in config['configurations']['zeppelin-config']:
+  zeppelin_interpreter = str(config['configurations']['zeppelin-config']
+                             ['zeppelin.interpreter.group.order']).split(",")
+
+# params from zeppelin-env
+zeppelin_user = config['configurations']['zeppelin-env']['zeppelin_user']
+zeppelin_group = config['configurations']['zeppelin-env']['zeppelin_group']
+zeppelin_log_dir = config['configurations']['zeppelin-env']['zeppelin_log_dir']
+zeppelin_pid_dir = config['configurations']['zeppelin-env']['zeppelin_pid_dir']
+zeppelin_log_file = os.path.join(zeppelin_log_dir, 'zeppelin-setup.log')
+zeppelin_hdfs_user_dir = format("/user/{zeppelin_user}")
+
+zeppelin_dir = os.path.join(*[install_dir, zeppelin_dirname])
+conf_dir = "/etc/zeppelin/conf"
+external_dependency_conf = "/etc/zeppelin/conf/external-dependency-conf"
+notebook_dir = os.path.join(*[install_dir, zeppelin_dirname, 'notebook'])
+
+# zeppelin-env.sh
+zeppelin_env_content = config['configurations']['zeppelin-env']['zeppelin_env_content']
+
+# shiro.ini
+shiro_ini_content = config['configurations']['zeppelin-shiro-ini']['shiro_ini_content']
+
+# log4j.properties
+log4j_properties_content = config['configurations']['zeppelin-log4j-properties']['log4j_properties_content']
+
+# detect configs
+master_configs = config['clusterHostInfo']
+java64_home = config['hostLevelParams']['java_home']
+ambari_host = str(master_configs['ambari_server_host'][0])
+zeppelin_host = str(master_configs['zeppelin_master_hosts'][0])
+ui_ssl_enabled = config['configurations']['zeppelin-config']['zeppelin.ssl']
+
+# detect HS2 details, if installed
+
+hive_server_host = None
+hive_metastore_host = '0.0.0.0'
+hive_metastore_port = None
+hive_server_port = None
+hive_zookeeper_quorum = None
+hive_server2_support_dynamic_service_discovery = None
+is_hive_installed = False
+hive_zookeeper_namespace = None
+hive_interactive_zookeeper_namespace = None
+
+if 'hive_server_host' in master_configs and len(master_configs['hive_server_host']) != 0:
+  is_hive_installed = True
+  spark_hive_properties = {
+    'hive.metastore.uris': default('/configurations/hive-site/hive.metastore.uris', '')
+  }
+  hive_server_host = str(master_configs['hive_server_host'][0])
+  hive_metastore_host = str(master_configs['hive_metastore_host'][0])
+  hive_metastore_port = str(
+    get_port_from_url(default('/configurations/hive-site/hive.metastore.uris', '')))
+  hive_server_port = str(config['configurations']['hive-site']['hive.server2.thrift.http.port'])
+  hive_zookeeper_quorum = config['configurations']['hive-site']['hive.zookeeper.quorum']
+  hive_zookeeper_namespace = config['configurations']['hive-site']['hive.server2.zookeeper.namespace']
+  hive_server2_support_dynamic_service_discovery = config['configurations']['hive-site']['hive.server2.support.dynamic.service.discovery']
+
+hive_server_interactive_hosts = None
+if 'hive_server_interactive_hosts' in master_configs and len(master_configs['hive_server_interactive_hosts']) != 0:
+    hive_server_interactive_hosts = str(master_configs['hive_server_interactive_hosts'][0])
+    hive_interactive_zookeeper_namespace = config['configurations']['hive-interactive-site']['hive.server2.zookeeper.namespace']
+    hive_server_port = str(config['configurations']['hive-site']['hive.server2.thrift.http.port'])
+    hive_zookeeper_quorum = config['configurations']['hive-site']['hive.zookeeper.quorum']
+    hive_server2_support_dynamic_service_discovery = config['configurations']['hive-site']['hive.server2.support.dynamic.service.discovery']
+
+spark_thrift_server_hosts = None
+spark_hive_thrift_port = None
+spark_hive_principal = None
+if 'spark_thriftserver_hosts' in master_configs and len(master_configs['spark_thriftserver_hosts']) != 0:
+  spark_thrift_server_hosts = str(master_configs['spark_thriftserver_hosts'][0])
+  if config['configurations']['spark-hive-site-override']:
+    spark_hive_thrift_port = config['configurations']['spark-hive-site-override']['hive.server2.thrift.port']
+  if config['configurations']['spark-thrift-sparkconf'] and \
+      'spark.sql.hive.hiveserver2.jdbc.url.principal' in config['configurations']['spark-thrift-sparkconf']:
+    spark_hive_principal = config['configurations']['spark-thrift-sparkconf']['spark.sql.hive.hiveserver2.jdbc.url.principal']
+
+spark2_thrift_server_hosts = None
+spark2_hive_thrift_port = None
+spark2_hive_principal = None
+if 'spark2_thriftserver_hosts' in master_configs and len(master_configs['spark2_thriftserver_hosts']) != 0:
+  spark2_thrift_server_hosts = str(master_configs['spark2_thriftserver_hosts'][0])
+  if config['configurations']['spark2-hive-site-override']:
+    spark2_hive_thrift_port = config['configurations']['spark2-hive-site-override']['hive.server2.thrift.port']
+  if config['configurations']['spark2-thrift-sparkconf'] and \
+      'spark.sql.hive.hiveserver2.jdbc.url.principal' in config['configurations']['spark2-thrift-sparkconf']:
+    spark2_hive_principal = config['configurations']['spark2-thrift-sparkconf']['spark.sql.hive.hiveserver2.jdbc.url.principal']
+
+
+# detect hbase details if installed
+zookeeper_znode_parent = None
+hbase_zookeeper_quorum = None
+is_hbase_installed = False
+if 'hbase_master_hosts' in master_configs and 'hbase-site' in config['configurations']:
+  is_hbase_installed = True
+  zookeeper_znode_parent = config['configurations']['hbase-site']['zookeeper.znode.parent']
+  hbase_zookeeper_quorum = config['configurations']['hbase-site']['hbase.zookeeper.quorum']
+
+# detect spark queue
+if 'spark-defaults' in config['configurations'] and 'spark.yarn.queue' in config['configurations']['spark-defaults']:
+  spark_queue = config['configurations']['spark-defaults']['spark.yarn.queue']
+elif 'spark2-defaults' in config['configurations'] and 'spark.yarn.queue' in config['configurations']['spark2-defaults']:
+  spark_queue = config['configurations']['spark2-defaults']['spark.yarn.queue']
+else:
+  spark_queue = 'default'
+
+zeppelin_kerberos_keytab = config['configurations']['zeppelin-env']['zeppelin.server.kerberos.keytab']
+zeppelin_kerberos_principal = config['configurations']['zeppelin-env']['zeppelin.server.kerberos.principal']
+if 'zeppelin.interpreter.config.upgrade' in config['configurations']['zeppelin-config']:
+  zeppelin_interpreter_config_upgrade = config['configurations']['zeppelin-config']['zeppelin.interpreter.config.upgrade']
+else:
+  zeppelin_interpreter_config_upgrade = False
+
+# e.g. 2.3
+stack_version_unformatted = config['hostLevelParams']['stack_version']
+
+# e.g. 2.3.0.0
+stack_version_formatted = format_stack_version(stack_version_unformatted)
+major_stack_version = get_major_version(stack_version_formatted)
+
+# e.g. 2.3.0.0-2130
+full_stack_version = default("/commandParams/version", None)
+
+spark_client_version = get_stack_version('spark-client')
+
+hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+livy_hosts = default("/clusterHostInfo/livy_server_hosts", [])
+livy2_hosts = default("/clusterHostInfo/livy2_server_hosts", [])
+
+livy_livyserver_host = None
+livy_livyserver_port = None
+livy2_livyserver_host = None
+livy2_livyserver_port = None
+if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY, stack_version_formatted) and \
+    len(livy_hosts) > 0:
+  livy_livyserver_host = str(livy_hosts[0])
+  livy_livyserver_port = config['configurations']['livy-conf']['livy.server.port']
+
+if stack_version_formatted and check_stack_feature(StackFeature.SPARK_LIVY2, stack_version_formatted) and \
+    len(livy2_hosts) > 0:
+  livy2_livyserver_host = str(livy2_hosts[0])
+  livy2_livyserver_port = config['configurations']['livy2-conf']['livy.server.port']
+
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
+kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
+hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
+hdfs_site = config['configurations']['hdfs-site']
+default_fs = config['configurations']['core-site']['fs.defaultFS']
+
+# create partial functions with common arguments for every HdfsResource call
+# to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
+  hdfs_resource_ignore_file="/var/lib/ambari-agent/data/.hdfs_resource_ignore",
+  security_enabled=security_enabled,
+  keytab=hdfs_user_keytab,
+  kinit_path_local=kinit_path_local,
+  hadoop_bin_dir=hadoop_bin_dir,
+  hadoop_conf_dir=hadoop_conf_dir,
+  principal_name=hdfs_principal_name,
+  hdfs_site=hdfs_site,
+  default_fs=default_fs
+)
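
Note: the functools.partial above pre-binds the cluster-wide arguments so each call site passes only what varies. A standalone sketch of the same pattern (hdfs_resource here is a stand-in, not the real HdfsResource class):

    import functools

    def hdfs_resource(path, user=None, type=None, action=None):
        # stand-in for resource_management's HdfsResource
        print('%s %s %s as user %s' % (action, type, path, user))

    HdfsResource = functools.partial(hdfs_resource, user='hdfs')
    HdfsResource('/user/zeppelin', type='directory', action='create_on_execute')
    # prints: create_on_execute directory /user/zeppelin as user hdfs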

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/service_check.py
new file mode 100644
index 0000000..bd7c855
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/service_check.py
@@ -0,0 +1,39 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.format import format
+from resource_management.core.resources.system import Execute
+
+class ZeppelinServiceCheck(Script):
+    def service_check(self, env):
+        import params
+        env.set_params(params)
+
+        if params.security_enabled:
+          zeppelin_kinit_cmd = format("{kinit_path_local} -kt 
{zeppelin_kerberos_keytab} {zeppelin_kerberos_principal}; ")
+          Execute(zeppelin_kinit_cmd, user=params.zeppelin_user)
+
+        scheme = "https" if params.ui_ssl_enabled else "http"
+        Execute(format("curl -s -o /dev/null -w'%{{http_code}}' --negotiate 
-u: -k {scheme}://{zeppelin_host}:{zeppelin_port} | grep 200"),
+                tries=10,
+                try_sleep=3,
+                logoutput=True)
+
+if __name__ == "__main__":
+    ZeppelinServiceCheck().execute()
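
Roughly speaking, resource_management's format() interpolates {names} from params and the caller's scope, so the check expands to a kinit (when secured) followed by a curl probe. A standalone sketch of the command expansion, with illustrative host and port values:

    # illustrative values; in the real check these come from params
    scheme = "http"
    zeppelin_host, zeppelin_port = "c6401.ambari.apache.org", "9995"
    cmd = ("curl -s -o /dev/null -w'%{{http_code}}' --negotiate -u: -k "
           "{scheme}://{zeppelin_host}:{zeppelin_port} | grep 200").format(
               scheme=scheme, zeppelin_host=zeppelin_host, zeppelin_port=zeppelin_port)
    print(cmd)
    # curl -s -o /dev/null -w'%{http_code}' --negotiate -u: -k http://c6401.ambari.apache.org:9995 | grep 200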

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/spark2_config_template.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/spark2_config_template.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/spark2_config_template.py
new file mode 100644
index 0000000..28a63c6
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/spark2_config_template.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+template = '''
+{
+  "id": "2C4U48MY3_spark2",
+  "name": "spark2",
+  "group": "spark",
+  "properties": {
+    "spark.executor.memory": "",
+    "args": "",
+    "zeppelin.spark.printREPLOutput": "true",
+    "spark.cores.max": "",
+    "zeppelin.dep.additionalRemoteRepository": 
"spark-packages,http://dl.bintray.com/spark-packages/maven,false;";,
+    "zeppelin.spark.importImplicit": "true",
+    "zeppelin.spark.sql.stacktrace": "false",
+    "zeppelin.spark.concurrentSQL": "false",
+    "zeppelin.spark.useHiveContext": "true",
+    "zeppelin.pyspark.python": "python",
+    "zeppelin.dep.localrepo": "local-repo",
+    "zeppelin.R.knitr": "true",
+    "zeppelin.spark.maxResult": "1000",
+    "master": "local[*]",
+    "spark.app.name": "Zeppelin",
+    "zeppelin.R.image.width": "100%",
+    "zeppelin.R.render.options": "out.format \u003d \u0027html\u0027, comment 
\u003d NA, echo \u003d FALSE, results \u003d \u0027asis\u0027, message \u003d 
F, warning \u003d F",
+    "zeppelin.R.cmd": "R"
+  },
+  "status": "READY",
+  "interpreterGroup": [
+    {
+      "name": "spark",
+      "class": "org.apache.zeppelin.spark.SparkInterpreter",
+      "defaultInterpreter": true
+    },
+    {
+      "name": "sql",
+      "class": "org.apache.zeppelin.spark.SparkSqlInterpreter",
+      "defaultInterpreter": false
+    },
+    {
+      "name": "dep",
+      "class": "org.apache.zeppelin.spark.DepInterpreter",
+      "defaultInterpreter": false
+    },
+    {
+      "name": "pyspark",
+      "class": "org.apache.zeppelin.spark.PySparkInterpreter",
+      "defaultInterpreter": false
+    },
+    {
+      "name": "r",
+      "class": "org.apache.zeppelin.spark.SparkRInterpreter",
+      "defaultInterpreter": false
+    }
+  ],
+  "dependencies": [],
+  "option": {
+    "remote": true,
+    "port": -1,
+    "perNoteSession": false,
+    "perNoteProcess": false,
+    "isExistingProcess": false,
+    "setPermission": false
+  }
+}
+'''
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/status_params.py
new file mode 100644
index 0000000..35360c6
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/status_params.py
@@ -0,0 +1,29 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management.libraries.script import Script
+
+config = Script.get_config()
+
+zeppelin_pid_dir = config['configurations']['zeppelin-env']['zeppelin_pid_dir']
+zeppelin_user = config['configurations']['zeppelin-env']['zeppelin_user']
+zeppelin_group = config['configurations']['zeppelin-env']['zeppelin_group']
+zeppelin_log_dir = config['configurations']['zeppelin-env']['zeppelin_log_dir']

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/templates/input.config-zeppelin.json.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/templates/input.config-zeppelin.json.j2 b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/templates/input.config-zeppelin.json.j2
new file mode 100644
index 0000000..2b373d5
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/templates/input.config-zeppelin.json.j2
@@ -0,0 +1,48 @@
+{#
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #}
+{
+  "input":[
+    {
+      "type":"zeppelin",
+      "rowtype":"service",
+      "path":"{{default('/configurations/zeppelin-env/zeppelin_log_dir', 
'/var/log/zeppelin')}}/zeppelin-zeppelin-*.log"
+    }
+  ],
+  "filter":[
+    {
+      "filter":"grok",
+      "conditions":{
+        "fields":{
+          "type":[
+            "zeppelin"
+          ]
+        }
+      },
+      "log4j_format":"",
+      "multiline_pattern":"^(%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\])",
+      "message_pattern":"(?m)^%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}\\(\\{{"{"}}%{DATA:thread_name}\\{{"}"}}%{SPACE}%{JAVAFILE:file}\\[%{JAVAMETHOD:method}\\]:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
+      "post_map_values":{
+        "logtime":{
+          "map_date":{
+            "target_date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
+          }
+        }
+      }
+    }
+  ]
+}
\ No newline at end of file
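
The grok patterns above target Zeppelin's log4j layout. For intuition, here is a hand-expanded, simplified plain-regex equivalent with a made-up sample line; note that macros like %{LOGLEVEL} are grok, so this sketch only approximates their semantics:

    import re

    # illustrative log line in the shape the patterns above expect
    sample = (" INFO [2017-06-01 12:34:56,789] ({qtp-123} "
              "NotebookServer.java[onOpen]:157) - New connection")
    simplified = (r"^\s*(?P<level>[A-Z]+)\s*\[(?P<logtime>\d{4}-\d{2}-\d{2} "
                  r"\d{2}:\d{2}:\d{2},\d{3})\]\s*\(\{(?P<thread_name>[^}]*)\}\s*"
                  r"(?P<file>\S+\.java)\[(?P<method>\w+)\]:(?P<line_number>\d+)\)"
                  r"\s*-\s*(?P<log_message>.*)$")
    print(re.match(simplified, sample).groupdict())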

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/quicklinks/quicklinks.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/quicklinks/quicklinks.json b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/quicklinks/quicklinks.json
new file mode 100644
index 0000000..c1d8491
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/quicklinks/quicklinks.json
@@ -0,0 +1,35 @@
+{
+  "name": "default",
+  "description": "default quick links configuration",
+  "configuration": {
+    "protocol":
+    {
+      "type":"https",
+      "checks":[
+        {
+          "property":"zeppelin.ssl",
+          "desired":"true",
+          "site":"zeppelin-config"
+        }
+      ]
+    },
+
+    "links": [
+      {
+        "name": "zeppelin_ui",
+        "label": "Zeppelin UI",
+        "requires_user_name": "false",
+        "component_name": "ZEPPELIN_MASTER",
+        "url":"%@://%@:%@/",
+        "port":{
+          "http_property": "zeppelin.server.port",
+          "http_default_port": "9995",
+          "https_property": "zeppelin.server.port",
+          "https_default_port": "9995",
+          "regex": "^(\\d+)$",
+          "site": "zeppelin-config"
+        }
+      }
+    ]
+  }
+}
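
The "%@" placeholders in the url template are filled positionally with the resolved protocol, host, and port. A hedged sketch of that substitution (the function name is made up for illustration):

    def render_quicklink(template, protocol, host, port):
        # replace one %@ at a time, left to right
        for value in (protocol, host, str(port)):
            template = template.replace("%@", value, 1)
        return template

    print(render_quicklink("%@://%@:%@/", "https", "c6401.ambari.apache.org", 9995))
    # -> https://c6401.ambari.apache.org:9995/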

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/role_command_order.json b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/role_command_order.json
new file mode 100644
index 0000000..3b7d2d0
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/role_command_order.json
@@ -0,0 +1,7 @@
+{
+  "general_deps" : {
+    "_comment" : "dependencies for ZEPPELIN",
+    "ZEPPELIN_MASTER-START" : ["NAMENODE-START"],
+    "ZEPPELIN_SERVICE_CHECK-SERVICE_CHECK" : ["ZEPPELIN_MASTER-START"]
+  }
+}
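
Each entry reads as "command X waits for the commands in its list". A small sketch of that interpretation, with the data copied from the block above:

    deps = {
        "ZEPPELIN_MASTER-START": ["NAMENODE-START"],
        "ZEPPELIN_SERVICE_CHECK-SERVICE_CHECK": ["ZEPPELIN_MASTER-START"],
    }
    # ZEPPELIN_MASTER starts only after NAMENODE; the service check runs last
    for cmd, prereqs in deps.items():
        print("%s runs after %s" % (cmd, ", ".join(prereqs)))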

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/service_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/service_advisor.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/service_advisor.py
new file mode 100644
index 0000000..4548961
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/service_advisor.py
@@ -0,0 +1,209 @@
+#!/usr/bin/env ambari-python-wrap
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+# Python imports
+import imp
+import os
+import traceback
+import re
+import socket
+import fnmatch
+
+
+from resource_management.core.logger import Logger
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+STACKS_DIR = os.path.join(SCRIPT_DIR, '../../../stacks/')
+PARENT_FILE = os.path.join(STACKS_DIR, 'service_advisor.py')
+
+try:
+  with open(PARENT_FILE, 'rb') as fp:
+    service_advisor = imp.load_module('service_advisor', fp, PARENT_FILE, ('.py', 'rb', imp.PY_SOURCE))
+except Exception as e:
+  traceback.print_exc()
+  print "Failed to load parent"
+
+class ZeppelinServiceAdvisor(service_advisor.ServiceAdvisor):
+
+  def __init__(self, *args, **kwargs):
+    self.as_super = super(ZeppelinServiceAdvisor, self)
+    self.as_super.__init__(*args, **kwargs)
+
+    # Always call these methods
+    self.modifyMastersWithMultipleInstances()
+    self.modifyCardinalitiesDict()
+    self.modifyHeapSizeProperties()
+    self.modifyNotValuableComponents()
+    self.modifyComponentsNotPreferableOnServer()
+    self.modifyComponentLayoutSchemes()
+
+  def modifyMastersWithMultipleInstances(self):
+    """
+    Modify the set of masters with multiple instances.
+    Must be overridden in child class.
+    """
+    # Nothing to do
+    pass
+
+  def modifyCardinalitiesDict(self):
+    """
+    Modify the dictionary of cardinalities.
+    Must be overridden in child class.
+    """
+    # Nothing to do
+    pass
+
+  def modifyHeapSizeProperties(self):
+    """
+    Modify the dictionary of heap size properties.
+    Must be overridden in child class.
+    """
+    pass
+
+  def modifyNotValuableComponents(self):
+    """
+    Modify the set of components whose host assignment is based on other services.
+    Must be overridden in child class.
+    """
+    # Nothing to do
+    pass
+
+  def modifyComponentsNotPreferableOnServer(self):
+    """
+    Modify the set of components that are not preferable on the server.
+    Must be overridden in child class.
+    """
+    # Nothing to do
+    pass
+
+  def modifyComponentLayoutSchemes(self):
+    """
+    Modify layout scheme dictionaries for components.
+    The scheme dictionary basically maps the number of hosts to
+    the host index where the component should exist.
+    Must be overridden in child class.
+    """
+    # Nothing to do
+    pass
+
+  def getServiceComponentLayoutValidations(self, services, hosts):
+    """
+    Get a list of errors.
+    Must be overridden in child class.
+    """
+
+    return []
+
+  def getServiceConfigurationRecommendations(self, configurations, clusterData, services, hosts):
+    """
+    Entry point.
+    Must be overridden in child class.
+    """
+    #Logger.info("Class: %s, Method: %s. Recommending Service Configurations." 
%
+    #            (self.__class__.__name__, inspect.stack()[0][3]))
+
+    recommender = ZeppelinRecommender()
+    recommender.recommendZeppelinConfigurationsFromHDP25(configurations, clusterData, services, hosts)
+
+  def getServiceConfigurationsValidationItems(self, configurations, recommendedDefaults, services, hosts):
+    """
+    Entry point.
+    Validate configurations for the service. Return a list of errors.
+    The code for this function should be the same for each Service Advisor.
+    """
+    #Logger.info("Class: %s, Method: %s. Validating Configurations." %
+    #            (self.__class__.__name__, inspect.stack()[0][3]))
+
+    validator = ZeppelinValidator()
+    # Calls the methods of the validator using arguments,
+    # method(siteProperties, siteRecommendations, configurations, services, hosts)
+    return validator.validateListOfConfigUsingMethod(configurations, recommendedDefaults, services, hosts, validator.validators)
+
+
+
+class ZeppelinRecommender(service_advisor.ServiceAdvisor):
+  """
+  Zeppelin Recommender suggests properties when adding the service for the first time or modifying configs via the UI.
+  """
+
+  def __init__(self, *args, **kwargs):
+    self.as_super = super(ZeppelinRecommender, self)
+    self.as_super.__init__(*args, **kwargs)
+
+  def recommendZeppelinConfigurationsFromHDP25(self, configurations, clusterData, services, hosts):
+    """
+    :type configurations dict
+    :type clusterData dict
+    :type services dict
+    :type hosts dict
+    """
+    self.__recommendLivySuperUsers(configurations, services)
+
+  def __recommendLivySuperUsers(self, configurations, services):
+    """
+    If Kerberos is enabled and both Zeppelin and the Spark Livy Server are installed, set
+    livy-conf/livy.superusers to contain the Zeppelin principal name from
+    zeppelin-env/zeppelin.server.kerberos.principal
+
+    :param configurations:
+    :param services:
+    """
+    if self.isSecurityEnabled(services):
+      zeppelin_env = self.getServicesSiteProperties(services, "zeppelin-env")
+
+      if zeppelin_env and 'zeppelin.server.kerberos.principal' in zeppelin_env:
+        zeppelin_principal = zeppelin_env['zeppelin.server.kerberos.principal']
+        zeppelin_user = zeppelin_principal.split('@')[0] if zeppelin_principal else None
+
+        if zeppelin_user:
+          livy_conf = self.getServicesSiteProperties(services, 'livy-conf')
+
+          if livy_conf:
+            superusers = livy_conf['livy.superusers'] if livy_conf and 'livy.superusers' in livy_conf else None
+
+            # add the Zeppelin user to the set of users
+            if superusers:
+              _superusers = superusers.split(',')
+              _superusers = [x.strip() for x in _superusers]
+              _superusers = filter(None, _superusers)  # Removes empty string elements from array
+            else:
+              _superusers = []
+
+            if zeppelin_user not in _superusers:
+              _superusers.append(zeppelin_user)
+
+              putLivyProperty = self.putProperty(configurations, 'livy-conf', services)
+              putLivyProperty('livy.superusers', ','.join(_superusers))
+
+class ZeppelinValidator(service_advisor.ServiceAdvisor):
+  """
+  Zeppelin Validator checks the correctness of properties whenever the service is first added
+  or the user attempts to change configs via the UI.
+  """
+
+  def __init__(self, *args, **kwargs):
+    self.as_super = super(ZeppelinValidator, self)
+    self.as_super.__init__(*args, **kwargs)
+
+    self.validators = []
+
+
+
+
+
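
For intuition, a standalone illustration of the livy.superusers merge performed by __recommendLivySuperUsers above (the input values here are made up):

    def merge_superusers(existing, principal):
        # derive the short user name from the Kerberos principal
        user = principal.split('@')[0]
        users = [u.strip() for u in (existing or '').split(',') if u.strip()]
        if user not in users:
            users.append(user)
        return ','.join(users)

    print(merge_superusers('livy,hdfs', 'zeppelin-mycluster@EXAMPLE.COM'))
    # -> 'livy,hdfs,zeppelin-mycluster'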

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZEPPELIN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZEPPELIN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZEPPELIN/metainfo.xml
index ee189b1..82e0c35 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZEPPELIN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZEPPELIN/metainfo.xml
@@ -21,8 +21,8 @@
     <services>
         <service>
             <name>ZEPPELIN</name>
-            <version>0.6.0.2.5</version>
-            <extends>common-services/ZEPPELIN/0.6.0.2.5</extends>
+            <version>0.6.0</version>
+            <extends>common-services/ZEPPELIN/0.6.0</extends>
             <osSpecifics>
                 <osSpecific>
                     <osFamily>redhat7,amazon2015,redhat6,suse11,suse12</osFamily>

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/metainfo.xml
index d4292c8..842f4e7 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/metainfo.xml
@@ -21,7 +21,26 @@
   <services>
     <service>
       <name>ZEPPELIN</name>
-      <version>0.7.2</version>
+      <version>0.7.0</version>
+      <extends>common-services/ZEPPELIN/0.7.0</extends>
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>redhat7,amazon2015,redhat6,suse11,suse12</osFamily>
+          <packages>
+            <package>
+              <name>zeppelin_${stack_version}</name>
+            </package>
+          </packages>
+        </osSpecific>
+        <osSpecific>
+          <osFamily>debian7,ubuntu12,ubuntu14,ubuntu16</osFamily>
+          <packages>
+            <package>
+              <name>zeppelin-${stack_version}</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/190094ba/ambari-server/src/main/resources/stacks/HDP/3.0/services/ZEPPELIN/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/services/ZEPPELIN/metainfo.xml b/ambari-server/src/main/resources/stacks/HDP/3.0/services/ZEPPELIN/metainfo.xml
index f810d1a..7c62e65 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/services/ZEPPELIN/metainfo.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/services/ZEPPELIN/metainfo.xml
@@ -20,8 +20,8 @@
     <services>
         <service>
             <name>ZEPPELIN</name>
-            <version>0.7.2.3.0</version>
-            <extends>common-services/ZEPPELIN/0.6.0.3.0</extends>
+            <version>0.7.0</version>
+            <extends>common-services/ZEPPELIN/0.7.0</extends>
         </service>
     </services>
 </metainfo>
